From a4268c9fdf17a279b35ad5f31ef857f01260a539 Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Thu, 9 Oct 2025 03:00:21 +0000
Subject: [PATCH 1/4] feat: Add support for Python 3.14

fix: Deprecate credentials_file argument

chore: Update gapic-generator-python to 1.28.0

PiperOrigin-RevId: 816753840
Source-Link: https://github.com/googleapis/googleapis/commit/d06cf27a47074d1de3fde6f0ca48680a96229306
Source-Link: https://github.com/googleapis/googleapis-gen/commit/a524e7310882bbb99bfe1399b18bed328979211c
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTUyNGU3MzEwODgyYmJiOTliZmUxMzk5YjE4YmVkMzI4OTc5MjExYyJ9
---
 owl-bot-staging/spanner/v1/.coveragerc | 13 +
 owl-bot-staging/spanner/v1/.flake8 | 34 +
 owl-bot-staging/spanner/v1/LICENSE | 202 +
 owl-bot-staging/spanner/v1/MANIFEST.in | 20 +
 owl-bot-staging/spanner/v1/README.rst | 143 +
 .../spanner/v1/docs/_static/custom.css | 20 +
 .../spanner/v1/docs/_templates/layout.html | 50 +
 owl-bot-staging/spanner/v1/docs/conf.py | 385 +
 owl-bot-staging/spanner/v1/docs/index.rst | 10 +
 .../spanner/v1/docs/multiprocessing.rst | 7 +
 .../spanner/v1/docs/spanner_v1/services_.rst | 6 +
 .../spanner/v1/docs/spanner_v1/spanner.rst | 10 +
 .../spanner/v1/docs/spanner_v1/types_.rst | 6 +
 .../v1/google/cloud/spanner/__init__.py | 113 +
 .../v1/google/cloud/spanner/gapic_version.py | 16 +
 .../spanner/v1/google/cloud/spanner/py.typed | 2 +
 .../v1/google/cloud/spanner_v1/__init__.py | 114 +
 .../cloud/spanner_v1/gapic_metadata.json | 268 +
 .../google/cloud/spanner_v1/gapic_version.py | 16 +
 .../v1/google/cloud/spanner_v1/py.typed | 2 +
 .../cloud/spanner_v1/services/__init__.py | 15 +
 .../spanner_v1/services/spanner/__init__.py | 22 +
 .../services/spanner/async_client.py | 2125 ++
 .../spanner_v1/services/spanner/client.py | 2486 ++
 .../spanner_v1/services/spanner/pagers.py | 166 +
 .../services/spanner/transports/README.rst | 9 +
 .../services/spanner/transports/__init__.py | 38 +
 .../services/spanner/transports/base.py | 505 +
 .../services/spanner/transports/grpc.py | 899 +
 .../spanner/transports/grpc_asyncio.py | 1125 +
 .../services/spanner/transports/rest.py | 2898 ++
 .../services/spanner/transports/rest_base.py | 817 +
 .../google/cloud/spanner_v1/types/__init__.py | 122 +
 .../cloud/spanner_v1/types/change_stream.py | 683 +
 .../cloud/spanner_v1/types/commit_response.py | 104 +
 .../v1/google/cloud/spanner_v1/types/keys.py | 248 +
 .../google/cloud/spanner_v1/types/mutation.py | 201 +
 .../cloud/spanner_v1/types/query_plan.py | 219 +
 .../cloud/spanner_v1/types/result_set.py | 379 +
 .../google/cloud/spanner_v1/types/spanner.py | 1782 ++
 .../cloud/spanner_v1/types/transaction.py | 491 +
 .../v1/google/cloud/spanner_v1/types/type.py | 288 +
 owl-bot-staging/spanner/v1/mypy.ini | 3 +
 owl-bot-staging/spanner/v1/noxfile.py | 601 +
 .../snippet_metadata_google.spanner.v1.json | 2579 ++
 ...ted_spanner_batch_create_sessions_async.py | 53 +
 ...ated_spanner_batch_create_sessions_sync.py | 53 +
 ..._v1_generated_spanner_batch_write_async.py | 57 +
 ...r_v1_generated_spanner_batch_write_sync.py | 57 +
 ...nerated_spanner_begin_transaction_async.py | 52 +
 ...enerated_spanner_begin_transaction_sync.py | 52 +
 ...anner_v1_generated_spanner_commit_async.py | 53 +
 ...panner_v1_generated_spanner_commit_sync.py | 53 +
 ..._generated_spanner_create_session_async.py | 52 +
 ...1_generated_spanner_create_session_sync.py | 52 +
 ..._generated_spanner_delete_session_async.py | 50 +
 ...1_generated_spanner_delete_session_sync.py | 50 +
 ...nerated_spanner_execute_batch_dml_async.py | 57 +
 ...enerated_spanner_execute_batch_dml_sync.py | 57 +
 ..._v1_generated_spanner_execute_sql_async.py | 53 +
 ...r_v1_generated_spanner_execute_sql_sync.py | 53 +
 ...ted_spanner_execute_streaming_sql_async.py | 54 +
 ...ated_spanner_execute_streaming_sql_sync.py | 54 +
 ..._v1_generated_spanner_get_session_async.py | 52 +
 ...r_v1_generated_spanner_get_session_sync.py | 52 +
 ...1_generated_spanner_list_sessions_async.py | 53 +
 ...v1_generated_spanner_list_sessions_sync.py | 53 +
 ...generated_spanner_partition_query_async.py | 53 +
 ..._generated_spanner_partition_query_sync.py | 53 +
 ..._generated_spanner_partition_read_async.py | 53 +
 ...1_generated_spanner_partition_read_sync.py | 53 +
 ...spanner_v1_generated_spanner_read_async.py | 54 +
 .../spanner_v1_generated_spanner_read_sync.py | 54 +
 ...ner_v1_generated_spanner_rollback_async.py | 51 +
 ...nner_v1_generated_spanner_rollback_sync.py | 51 +
 ..._generated_spanner_streaming_read_async.py | 55 +
 ...1_generated_spanner_streaming_read_sync.py | 55 +
 .../v1/scripts/fixup_spanner_v1_keywords.py | 191 +
 owl-bot-staging/spanner/v1/setup.py | 101 +
 .../spanner/v1/testing/constraints-3.10.txt | 8 +
 .../spanner/v1/testing/constraints-3.11.txt | 8 +
 .../spanner/v1/testing/constraints-3.12.txt | 8 +
 .../spanner/v1/testing/constraints-3.13.txt | 12 +
 .../spanner/v1/testing/constraints-3.14.txt | 12 +
 .../spanner/v1/testing/constraints-3.7.txt | 12 +
 .../spanner/v1/testing/constraints-3.8.txt | 8 +
 .../spanner/v1/testing/constraints-3.9.txt | 8 +
 owl-bot-staging/spanner/v1/tests/__init__.py | 16 +
 .../spanner/v1/tests/unit/__init__.py | 16 +
 .../spanner/v1/tests/unit/gapic/__init__.py | 16 +
 .../tests/unit/gapic/spanner_v1/__init__.py | 16 +
 .../unit/gapic/spanner_v1/test_spanner.py | 11446 ++++++++
 .../spanner_admin_database/v1/.coveragerc | 13 +
 .../spanner_admin_database/v1/.flake8 | 34 +
 .../spanner_admin_database/v1/LICENSE | 202 +
 .../spanner_admin_database/v1/MANIFEST.in | 20 +
 .../spanner_admin_database/v1/README.rst | 143 +
 .../v1/docs/_static/custom.css | 20 +
 .../v1/docs/_templates/layout.html | 50 +
 .../spanner_admin_database/v1/docs/conf.py | 385 +
 .../spanner_admin_database/v1/docs/index.rst | 10 +
 .../v1/docs/multiprocessing.rst | 7 +
 .../database_admin.rst | 10 +
 .../spanner_admin_database_v1/services_.rst | 6 +
 .../docs/spanner_admin_database_v1/types_.rst | 6 +
 .../cloud/spanner_admin_database/__init__.py | 149 +
 .../spanner_admin_database/gapic_version.py | 16 +
 .../cloud/spanner_admin_database/py.typed | 2 +
 .../spanner_admin_database_v1/__init__.py | 150 +
 .../gapic_metadata.json | 433 +
 .../gapic_version.py | 16 +
 .../cloud/spanner_admin_database_v1/py.typed | 2 +
 .../services/__init__.py | 15 +
 .../services/database_admin/__init__.py | 22 +
 .../services/database_admin/async_client.py | 3968 +++
 .../services/database_admin/client.py | 4387 +++
 .../services/database_admin/pagers.py | 864 +
 .../database_admin/transports/README.rst | 9 +
 .../database_admin/transports/__init__.py | 38 +
 .../database_admin/transports/base.py | 795 +
 .../database_admin/transports/grpc.py | 1285 +
 .../database_admin/transports/grpc_asyncio.py | 1656 ++
 .../database_admin/transports/rest.py | 5336 ++++
 .../database_admin/transports/rest_base.py | 1378 +
 .../types/__init__.py | 148 +
 .../spanner_admin_database_v1/types/backup.py | 1097 +
 .../types/backup_schedule.py | 369 +
 .../spanner_admin_database_v1/types/common.py | 179 +
 .../types/spanner_database_admin.py | 1348 +
 .../spanner_admin_database/v1/mypy.ini | 3 +
 .../spanner_admin_database/v1/noxfile.py | 601 +
 ...data_google.spanner.admin.database.v1.json | 4480 ++++
 ...d_database_admin_add_split_points_async.py | 52 +
 ...ed_database_admin_add_split_points_sync.py | 52 +
 ...erated_database_admin_copy_backup_async.py | 58 +
 ...nerated_database_admin_copy_backup_sync.py | 58 +
 ...ated_database_admin_create_backup_async.py | 57 +
 ...base_admin_create_backup_schedule_async.py | 53 +
 ...abase_admin_create_backup_schedule_sync.py | 53 +
 ...rated_database_admin_create_backup_sync.py | 57 +
 ...ed_database_admin_create_database_async.py | 57 +
 ...ted_database_admin_create_database_sync.py | 57 +
 ...ated_database_admin_delete_backup_async.py | 50 +
 ...base_admin_delete_backup_schedule_async.py | 50 +
 ...abase_admin_delete_backup_schedule_sync.py | 50 +
 ...rated_database_admin_delete_backup_sync.py | 50 +
 ...ated_database_admin_drop_database_async.py | 50 +
 ...rated_database_admin_drop_database_sync.py | 50 +
 ...nerated_database_admin_get_backup_async.py | 52 +
 ...atabase_admin_get_backup_schedule_async.py | 52 +
 ...database_admin_get_backup_schedule_sync.py | 52 +
 ...enerated_database_admin_get_backup_sync.py | 52 +
 ...rated_database_admin_get_database_async.py | 52 +
 ...d_database_admin_get_database_ddl_async.py | 52 +
 ...ed_database_admin_get_database_ddl_sync.py | 52 +
 ...erated_database_admin_get_database_sync.py | 52 +
 ...ted_database_admin_get_iam_policy_async.py | 53 +
 ...ated_database_admin_get_iam_policy_sync.py | 53 +
 ...n_internal_update_graph_operation_async.py | 54 +
 ...in_internal_update_graph_operation_sync.py | 54 +
 ...base_admin_list_backup_operations_async.py | 53 +
 ...abase_admin_list_backup_operations_sync.py | 53 +
 ...abase_admin_list_backup_schedules_async.py | 53 +
 ...tabase_admin_list_backup_schedules_sync.py | 53 +
 ...rated_database_admin_list_backups_async.py | 53 +
 ...erated_database_admin_list_backups_sync.py | 53 +
 ...se_admin_list_database_operations_async.py | 53 +
 ...ase_admin_list_database_operations_sync.py | 53 +
 ...atabase_admin_list_database_roles_async.py | 53 +
 ...database_admin_list_database_roles_sync.py | 53 +
 ...ted_database_admin_list_databases_async.py | 53 +
 ...ated_database_admin_list_databases_sync.py | 53 +
 ...d_database_admin_restore_database_async.py | 58 +
 ...ed_database_admin_restore_database_sync.py | 58 +
 ...ted_database_admin_set_iam_policy_async.py | 53 +
 ...ated_database_admin_set_iam_policy_sync.py | 53 +
 ...tabase_admin_test_iam_permissions_async.py | 54 +
 ...atabase_admin_test_iam_permissions_sync.py | 54 +
 ...ated_database_admin_update_backup_async.py | 51 +
 ...base_admin_update_backup_schedule_async.py | 51 +
 ...abase_admin_update_backup_schedule_sync.py | 51 +
 ...rated_database_admin_update_backup_sync.py | 51 +
 ...ed_database_admin_update_database_async.py | 59 +
 ...atabase_admin_update_database_ddl_async.py | 57 +
 ...database_admin_update_database_ddl_sync.py | 57 +
 ...ted_database_admin_update_database_sync.py | 59 +
 ...ixup_spanner_admin_database_v1_keywords.py | 202 +
 .../spanner_admin_database/v1/setup.py | 102 +
 .../v1/testing/constraints-3.10.txt | 9 +
 .../v1/testing/constraints-3.11.txt | 9 +
 .../v1/testing/constraints-3.12.txt | 9 +
 .../v1/testing/constraints-3.13.txt | 13 +
 .../v1/testing/constraints-3.14.txt | 13 +
 .../v1/testing/constraints-3.7.txt | 13 +
 .../v1/testing/constraints-3.8.txt | 9 +
 .../v1/testing/constraints-3.9.txt | 9 +
 .../v1/tests/__init__.py | 16 +
 .../v1/tests/unit/__init__.py | 16 +
 .../v1/tests/unit/gapic/__init__.py | 16 +
 .../spanner_admin_database_v1/__init__.py | 16 +
 .../test_database_admin.py | 22226 ++++++++++++++++
 .../spanner_admin_instance/v1/.coveragerc | 13 +
 .../spanner_admin_instance/v1/.flake8 | 34 +
 .../spanner_admin_instance/v1/LICENSE | 202 +
 .../spanner_admin_instance/v1/MANIFEST.in | 20 +
 .../spanner_admin_instance/v1/README.rst | 143 +
 .../v1/docs/_static/custom.css | 20 +
 .../v1/docs/_templates/layout.html | 50 +
 .../spanner_admin_instance/v1/docs/conf.py | 385 +
 .../spanner_admin_instance/v1/docs/index.rst | 10 +
 .../v1/docs/multiprocessing.rst | 7 +
 .../instance_admin.rst | 10 +
 .../spanner_admin_instance_v1/services_.rst | 6 +
 .../docs/spanner_admin_instance_v1/types_.rst | 6 +
 .../cloud/spanner_admin_instance/__init__.py | 109 +
 .../spanner_admin_instance/gapic_version.py | 16 +
 .../cloud/spanner_admin_instance/py.typed | 2 +
 .../spanner_admin_instance_v1/__init__.py | 110 +
 .../gapic_metadata.json | 343 +
 .../gapic_version.py | 16 +
 .../cloud/spanner_admin_instance_v1/py.typed | 2 +
 .../services/__init__.py | 15 +
 .../services/instance_admin/__init__.py | 22 +
 .../services/instance_admin/async_client.py | 3468 +++
 .../services/instance_admin/client.py | 3849 +++
 .../services/instance_admin/pagers.py | 723 +
 .../instance_admin/transports/README.rst | 9 +
 .../instance_admin/transports/__init__.py | 38 +
 .../instance_admin/transports/base.py | 567 +
 .../instance_admin/transports/grpc.py | 1359 +
 .../instance_admin/transports/grpc_asyncio.py | 1560 ++
 .../instance_admin/transports/rest.py | 4462 ++++
 .../instance_admin/transports/rest_base.py | 1152 +
 .../types/__init__.py | 104 +
 .../spanner_admin_instance_v1/types/common.py | 99 +
 .../types/spanner_instance_admin.py | 2366 ++
 .../spanner_admin_instance/v1/mypy.ini | 3 +
 .../spanner_admin_instance/v1/noxfile.py | 601 +
 ...data_google.spanner.admin.instance.v1.json | 3450 +++
 ...ed_instance_admin_create_instance_async.py | 63 +
 ...ance_admin_create_instance_config_async.py | 57 +
 ...tance_admin_create_instance_config_sync.py | 57 +
 ...e_admin_create_instance_partition_async.py | 64 +
 ...ce_admin_create_instance_partition_sync.py | 64 +
 ...ted_instance_admin_create_instance_sync.py | 63 +
 ...ed_instance_admin_delete_instance_async.py | 50 +
 ...ance_admin_delete_instance_config_async.py | 50 +
 ...tance_admin_delete_instance_config_sync.py | 50 +
 ...e_admin_delete_instance_partition_async.py | 50 +
 ...ce_admin_delete_instance_partition_sync.py | 50 +
 ...ted_instance_admin_delete_instance_sync.py | 50 +
 ...ted_instance_admin_get_iam_policy_async.py | 53 +
 ...ated_instance_admin_get_iam_policy_sync.py | 53 +
 ...rated_instance_admin_get_instance_async.py | 52 +
 ...nstance_admin_get_instance_config_async.py | 52 +
 ...instance_admin_get_instance_config_sync.py | 52 +
 ...ance_admin_get_instance_partition_async.py | 52 +
 ...tance_admin_get_instance_partition_sync.py | 52 +
 ...erated_instance_admin_get_instance_sync.py | 52 +
 ...n_list_instance_config_operations_async.py | 53 +
 ...in_list_instance_config_operations_sync.py | 53 +
 ...tance_admin_list_instance_configs_async.py | 53 +
 ...stance_admin_list_instance_configs_sync.py | 53 +
 ...ist_instance_partition_operations_async.py | 53 +
 ...list_instance_partition_operations_sync.py | 53 +
 ...ce_admin_list_instance_partitions_async.py | 53 +
 ...nce_admin_list_instance_partitions_sync.py | 53 +
 ...ted_instance_admin_list_instances_async.py | 53 +
 ...ated_instance_admin_list_instances_sync.py | 53 +
 ...ated_instance_admin_move_instance_async.py | 57 +
 ...rated_instance_admin_move_instance_sync.py | 57 +
 ...ted_instance_admin_set_iam_policy_async.py | 53 +
 ...ated_instance_admin_set_iam_policy_sync.py | 53 +
 ...stance_admin_test_iam_permissions_async.py | 54 +
 ...nstance_admin_test_iam_permissions_sync.py | 54 +
 ...ed_instance_admin_update_instance_async.py | 61 +
 ...ance_admin_update_instance_config_async.py | 55 +
 ...tance_admin_update_instance_config_sync.py | 55 +
 ...e_admin_update_instance_partition_async.py | 62 +
 ...ce_admin_update_instance_partition_sync.py | 62 +
 ...ted_instance_admin_update_instance_sync.py | 61 +
 ...ixup_spanner_admin_instance_v1_keywords.py | 196 +
 .../spanner_admin_instance/v1/setup.py | 102 +
 .../v1/testing/constraints-3.10.txt | 9 +
 .../v1/testing/constraints-3.11.txt | 9 +
 .../v1/testing/constraints-3.12.txt | 9 +
 .../v1/testing/constraints-3.13.txt | 13 +
 .../v1/testing/constraints-3.14.txt | 13 +
 .../v1/testing/constraints-3.7.txt | 13 +
 .../v1/testing/constraints-3.8.txt | 9 +
 .../v1/testing/constraints-3.9.txt | 9 +
 .../v1/tests/__init__.py | 16 +
 .../v1/tests/unit/__init__.py | 16 +
 .../v1/tests/unit/gapic/__init__.py | 16 +
 .../spanner_admin_instance_v1/__init__.py | 16 +
 .../test_instance_admin.py | 17636 ++++++++++++
 296 files changed, 134773 insertions(+)
 create mode 100644 owl-bot-staging/spanner/v1/.coveragerc create mode 100644 owl-bot-staging/spanner/v1/.flake8 create mode 100644 owl-bot-staging/spanner/v1/LICENSE create mode 100644 owl-bot-staging/spanner/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner/v1/README.rst create mode 100644 owl-bot-staging/spanner/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/spanner/v1/docs/_templates/layout.html create mode 100644 owl-bot-staging/spanner/v1/docs/conf.py create mode 100644 owl-bot-staging/spanner/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner/v1/docs/multiprocessing.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py create mode 100644
owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py create mode 100644 owl-bot-staging/spanner/v1/mypy.ini create mode 100644 owl-bot-staging/spanner/v1/noxfile.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py create 
mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py create mode 100644 owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py create mode 100644 owl-bot-staging/spanner/v1/setup.py create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.14.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/spanner/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/.coveragerc create mode 100644 owl-bot-staging/spanner_admin_database/v1/.flake8 create mode 100644 owl-bot-staging/spanner_admin_database/v1/LICENSE create mode 100644 owl-bot-staging/spanner_admin_database/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner_admin_database/v1/README.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/conf.py 
create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/mypy.ini create mode 100644 owl-bot-staging/spanner_admin_database/v1/noxfile.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/setup.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/.coveragerc create mode 100644 owl-bot-staging/spanner_admin_instance/v1/.flake8 create mode 100644 owl-bot-staging/spanner_admin_instance/v1/LICENSE create mode 100644 owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner_admin_instance/v1/README.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/conf.py create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/mypy.ini create mode 100644 owl-bot-staging/spanner_admin_instance/v1/noxfile.py create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/setup.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
diff --git a/owl-bot-staging/spanner/v1/.coveragerc b/owl-bot-staging/spanner/v1/.coveragerc
new file mode 100644
index 0000000000..677a992799
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/.coveragerc
@@ -0,0 +1,13 @@
+[run]
+branch = True
+
+[report]
+show_missing = True
+omit =
+    google/cloud/spanner/__init__.py
+    google/cloud/spanner/gapic_version.py
+exclude_lines =
+    # Re-enable the standard pragma
+    pragma: NO COVER
+    # Ignore debug-only repr
+    def __repr__
diff --git a/owl-bot-staging/spanner/v1/.flake8 b/owl-bot-staging/spanner/v1/.flake8
new file mode 100644
index 0000000000..90316de214
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/.flake8
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+[flake8]
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+# Resolve flake8 lint issues
+ignore = E203, E231, E266, E501, W503
+exclude =
+  # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+  # Ensure that generated code passes flake8 lint
+  **/gapic/**
+  **/services/**
+  **/types/**
+  # Exclude Protobuf gencode
+  *_pb2.py
+
+  # Standard linting exemptions.
+  **/.nox/**
+  __pycache__,
+  .git,
+  *.pyc,
+  conf.py
diff --git a/owl-bot-staging/spanner/v1/LICENSE b/owl-bot-staging/spanner/v1/LICENSE
new file mode 100644
index 0000000000..d645695673
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/owl-bot-staging/spanner/v1/MANIFEST.in b/owl-bot-staging/spanner/v1/MANIFEST.in
new file mode 100644
index 0000000000..dae249ec89
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/MANIFEST.in
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+include README.rst LICENSE
+recursive-include google *.py *.pyi *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/owl-bot-staging/spanner/v1/README.rst b/owl-bot-staging/spanner/v1/README.rst
new file mode 100644
index 0000000000..502ebce1c4
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/README.rst
@@ -0,0 +1,143 @@
+Python Client for Google Cloud Spanner API
+==========================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Spanner API.
+4. `Set up Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
+
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether on local storage or in Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured or the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+
+Examples
+^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard :code:`logging` mechanism.
+
+
+Examples
+^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+    import logging
+
+    base_logger = logging.getLogger("google")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+    import logging
+
+    base_logger = logging.getLogger("google.cloud.library_v1")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the :code:`google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
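The propagation behavior described under "Logging details" is easy to miss. As a minimal sketch (using only the standard library; the handler and level choices here are illustrative, not prescribed by this package), the following combines a code-based handler with explicit propagation to the root logger:

.. code-block:: python

    import logging

    # Attach a handler to the shared "google" logger and capture DEBUG events.
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # Events stay on the "google" logger by default; opt in explicitly if the
    # root logger (and its handlers) should also receive them.
    base_logger.propagate = True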
diff --git a/owl-bot-staging/spanner/v1/docs/_static/custom.css b/owl-bot-staging/spanner/v1/docs/_static/custom.css
new file mode 100644
index 0000000000..b0a295464b
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/owl-bot-staging/spanner/v1/docs/_templates/layout.html b/owl-bot-staging/spanner/v1/docs/_templates/layout.html
new file mode 100644
index 0000000000..95e9c77fcf
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/spanner/v1/docs/conf.py b/owl-bot-staging/spanner/v1/docs/conf.py new file mode 100644 index 0000000000..010a6b6cda --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-spanner", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-spanner.tex", + u"google-cloud-spanner Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (
+        root_doc,
+        "google-cloud-spanner",
+        "google-cloud-spanner Documentation",
+        [author],
+        1,
+    )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+    (
+        root_doc,
+        "google-cloud-spanner",
+        "google-cloud-spanner Documentation",
+        author,
+        "google-cloud-spanner",
+        "google-cloud-spanner Library",
+        "APIs",
+    )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/owl-bot-staging/spanner/v1/docs/index.rst b/owl-bot-staging/spanner/v1/docs/index.rst
new file mode 100644
index 0000000000..1162c85acc
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/docs/index.rst
@@ -0,0 +1,10 @@
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    spanner_v1/services_
+    spanner_v1/types_
diff --git a/owl-bot-staging/spanner/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner/v1/docs/multiprocessing.rst
new file mode 100644
index 0000000000..536d17b2ea
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
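The note above is easiest to see in code. A minimal sketch of the recommended pattern (assuming default application credentials are available; the database path is a hypothetical placeholder, not part of this diff) constructs the client inside the child process, after the fork has already happened:

.. code-block:: python

    import multiprocessing

    from google.cloud import spanner_v1


    def worker() -> None:
        # Construct the client *inside* the child process, after os.fork(),
        # so the underlying gRPC channel is never shared across processes.
        client = spanner_v1.SpannerClient()
        # "projects/p/instances/i/databases/d" is a placeholder database path.
        session = client.create_session(
            database="projects/p/instances/i/databases/d"
        )
        print(session.name)


    if __name__ == "__main__":
        process = multiprocessing.Process(target=worker)
        process.start()
        process.join()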
diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst new file mode 100644 index 0000000000..3bbbb55f79 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner v1 API +======================================== +.. toctree:: + :maxdepth: 2 + + spanner diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst new file mode 100644 index 0000000000..b51f4447e4 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst @@ -0,0 +1,10 @@ +Spanner +------------------------- + +.. automodule:: google.cloud.spanner_v1.services.spanner + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_v1.services.spanner.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst new file mode 100644 index 0000000000..c7ff7e6c71 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Spanner v1 API +===================================== + +.. automodule:: google.cloud.spanner_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py new file mode 100644 index 0000000000..4757e26d69 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.spanner_v1.services.spanner.client import SpannerClient +from google.cloud.spanner_v1.services.spanner.async_client import SpannerAsyncClient + +from google.cloud.spanner_v1.types.change_stream import ChangeStreamRecord +from google.cloud.spanner_v1.types.commit_response import CommitResponse +from google.cloud.spanner_v1.types.keys import KeyRange +from google.cloud.spanner_v1.types.keys import KeySet +from google.cloud.spanner_v1.types.mutation import Mutation +from google.cloud.spanner_v1.types.query_plan import PlanNode +from google.cloud.spanner_v1.types.query_plan import QueryPlan +from google.cloud.spanner_v1.types.result_set import PartialResultSet +from google.cloud.spanner_v1.types.result_set import ResultSet +from google.cloud.spanner_v1.types.result_set import ResultSetMetadata +from google.cloud.spanner_v1.types.result_set import ResultSetStats +from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsRequest +from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsResponse +from google.cloud.spanner_v1.types.spanner import BatchWriteRequest +from google.cloud.spanner_v1.types.spanner import BatchWriteResponse +from google.cloud.spanner_v1.types.spanner import BeginTransactionRequest +from google.cloud.spanner_v1.types.spanner import CommitRequest +from google.cloud.spanner_v1.types.spanner import CreateSessionRequest +from google.cloud.spanner_v1.types.spanner import DeleteSessionRequest +from google.cloud.spanner_v1.types.spanner import DirectedReadOptions +from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlRequest +from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlResponse +from google.cloud.spanner_v1.types.spanner import ExecuteSqlRequest +from google.cloud.spanner_v1.types.spanner import GetSessionRequest +from google.cloud.spanner_v1.types.spanner import ListSessionsRequest +from google.cloud.spanner_v1.types.spanner import ListSessionsResponse +from google.cloud.spanner_v1.types.spanner import Partition +from google.cloud.spanner_v1.types.spanner import PartitionOptions +from google.cloud.spanner_v1.types.spanner import PartitionQueryRequest +from google.cloud.spanner_v1.types.spanner import PartitionReadRequest +from google.cloud.spanner_v1.types.spanner import PartitionResponse +from google.cloud.spanner_v1.types.spanner import ReadRequest +from google.cloud.spanner_v1.types.spanner import RequestOptions +from google.cloud.spanner_v1.types.spanner import RollbackRequest +from google.cloud.spanner_v1.types.spanner import Session +from google.cloud.spanner_v1.types.transaction import MultiplexedSessionPrecommitToken +from google.cloud.spanner_v1.types.transaction import Transaction +from google.cloud.spanner_v1.types.transaction import TransactionOptions +from google.cloud.spanner_v1.types.transaction import TransactionSelector +from google.cloud.spanner_v1.types.type import StructType +from google.cloud.spanner_v1.types.type import Type +from google.cloud.spanner_v1.types.type import TypeAnnotationCode +from google.cloud.spanner_v1.types.type import TypeCode + +__all__ = ('SpannerClient', + 'SpannerAsyncClient', + 'ChangeStreamRecord', + 'CommitResponse', + 'KeyRange', + 'KeySet', + 'Mutation', + 'PlanNode', + 'QueryPlan', + 'PartialResultSet', + 'ResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 
'BatchWriteRequest', + 'BatchWriteResponse', + 'BeginTransactionRequest', + 'CommitRequest', + 'CreateSessionRequest', + 'DeleteSessionRequest', + 'DirectedReadOptions', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'ExecuteSqlRequest', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'Partition', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'PartitionResponse', + 'ReadRequest', + 'RequestOptions', + 'RollbackRequest', + 'Session', + 'MultiplexedSessionPrecommitToken', + 'Transaction', + 'TransactionOptions', + 'TransactionSelector', + 'StructType', + 'Type', + 'TypeAnnotationCode', + 'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed new file mode 100644 index 0000000000..0989eccd04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py new file mode 100644 index 0000000000..6275c8acfb --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.spanner import SpannerClient +from .services.spanner import SpannerAsyncClient + +from .types.change_stream import ChangeStreamRecord +from .types.commit_response import CommitResponse +from .types.keys import KeyRange +from .types.keys import KeySet +from .types.mutation import Mutation +from .types.query_plan import PlanNode +from .types.query_plan import QueryPlan +from .types.result_set import PartialResultSet +from .types.result_set import ResultSet +from .types.result_set import ResultSetMetadata +from .types.result_set import ResultSetStats +from .types.spanner import BatchCreateSessionsRequest +from .types.spanner import BatchCreateSessionsResponse +from .types.spanner import BatchWriteRequest +from .types.spanner import BatchWriteResponse +from .types.spanner import BeginTransactionRequest +from .types.spanner import CommitRequest +from .types.spanner import CreateSessionRequest +from .types.spanner import DeleteSessionRequest +from .types.spanner import DirectedReadOptions +from .types.spanner import ExecuteBatchDmlRequest +from .types.spanner import ExecuteBatchDmlResponse +from .types.spanner import ExecuteSqlRequest +from .types.spanner import GetSessionRequest +from .types.spanner import ListSessionsRequest +from .types.spanner import ListSessionsResponse +from .types.spanner import Partition +from .types.spanner import PartitionOptions +from .types.spanner import PartitionQueryRequest +from .types.spanner import PartitionReadRequest +from .types.spanner import PartitionResponse +from .types.spanner import ReadRequest +from .types.spanner import RequestOptions +from .types.spanner import RollbackRequest +from .types.spanner import Session +from .types.transaction import MultiplexedSessionPrecommitToken +from .types.transaction import Transaction +from .types.transaction import TransactionOptions +from .types.transaction import TransactionSelector +from .types.type import StructType +from .types.type import Type +from .types.type import TypeAnnotationCode +from .types.type import TypeCode + +__all__ = ( + 'SpannerAsyncClient', +'BatchCreateSessionsRequest', +'BatchCreateSessionsResponse', +'BatchWriteRequest', +'BatchWriteResponse', +'BeginTransactionRequest', +'ChangeStreamRecord', +'CommitRequest', +'CommitResponse', +'CreateSessionRequest', +'DeleteSessionRequest', +'DirectedReadOptions', +'ExecuteBatchDmlRequest', +'ExecuteBatchDmlResponse', +'ExecuteSqlRequest', +'GetSessionRequest', +'KeyRange', +'KeySet', +'ListSessionsRequest', +'ListSessionsResponse', +'MultiplexedSessionPrecommitToken', +'Mutation', +'PartialResultSet', +'Partition', +'PartitionOptions', +'PartitionQueryRequest', +'PartitionReadRequest', +'PartitionResponse', +'PlanNode', +'QueryPlan', +'ReadRequest', +'RequestOptions', +'ResultSet', +'ResultSetMetadata', +'ResultSetStats', +'RollbackRequest', +'Session', +'SpannerClient', +'StructType', +'Transaction', +'TransactionOptions', +'TransactionSelector', +'Type', +'TypeAnnotationCode', +'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json new file mode 100644 index 0000000000..f5957c633a --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json @@ -0,0 +1,268 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + 
"language": "python", + "libraryPackage": "google.cloud.spanner_v1", + "protoPackage": "google.spanner.v1", + "schema": "1.0", + "services": { + "Spanner": { + "clients": { + "grpc": { + "libraryClient": "SpannerClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SpannerAsyncClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + }, + "rest": { + "libraryClient": "SpannerClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py new 
file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed new file mode 100644 index 0000000000..0989eccd04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py new file mode 100644 index 0000000000..cbf94b283c --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py new file mode 100644 index 0000000000..a7de7a7b93 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SpannerClient +from .async_client import SpannerAsyncClient + +__all__ = ( + 'SpannerClient', + 'SpannerAsyncClient', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py new file mode 100644 index 0000000000..e3fe8cabac --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py @@ -0,0 +1,2125 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union + +from google.cloud.spanner_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport +from .client import SpannerClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class SpannerAsyncClient: + """Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + """ + + _client: SpannerClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+    DEFAULT_ENDPOINT = SpannerClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = SpannerClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = SpannerClient._DEFAULT_UNIVERSE
+
+    database_path = staticmethod(SpannerClient.database_path)
+    parse_database_path = staticmethod(SpannerClient.parse_database_path)
+    session_path = staticmethod(SpannerClient.session_path)
+    parse_session_path = staticmethod(SpannerClient.parse_session_path)
+    common_billing_account_path = staticmethod(SpannerClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(SpannerClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(SpannerClient.common_folder_path)
+    parse_common_folder_path = staticmethod(SpannerClient.parse_common_folder_path)
+    common_organization_path = staticmethod(SpannerClient.common_organization_path)
+    parse_common_organization_path = staticmethod(SpannerClient.parse_common_organization_path)
+    common_project_path = staticmethod(SpannerClient.common_project_path)
+    parse_common_project_path = staticmethod(SpannerClient.parse_common_project_path)
+    common_location_path = staticmethod(SpannerClient.common_location_path)
+    parse_common_location_path = staticmethod(SpannerClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SpannerAsyncClient: The constructed client.
+        """
+        return SpannerClient.from_service_account_info.__func__(SpannerAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SpannerAsyncClient: The constructed client.
+        """
+        return SpannerClient.from_service_account_file.__func__(SpannerAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        else use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return SpannerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> SpannerTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SpannerTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = SpannerClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the spanner async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SpannerTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if a client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence, and ``universe_domain`` is
+                currently not supported for mTLS.
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SpannerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner_v1.SpannerAsyncClient`.", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.spanner.v1.Spanner", + "credentialsType": None, + } + ) + + async def create_session(self, + request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_create_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.CreateSessionRequest( + database="database_value", + ) + + # Make the request + response = await client.create_session(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]]): + The request object. The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + database (:class:`str`): + Required. 
The database in which the + new session is created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CreateSessionRequest): + request = spanner.CreateSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_create_sessions(self, + request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + session_count: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.BatchCreateSessionsResponse: + r"""Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_batch_create_sessions(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.BatchCreateSessionsRequest( + database="database_value", + session_count=1420, + ) + + # Make the request + response = await client.batch_create_sessions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]]): + The request object. The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + database (:class:`str`): + Required. The database in which the + new sessions are created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session_count (:class:`int`): + Required. The number of sessions to be created in this + batch call. The API can return fewer than the requested + number of sessions. If a specific number of sessions are + desired, the client can make additional calls to + ``BatchCreateSessions`` (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + + This corresponds to the ``session_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.BatchCreateSessionsResponse: + The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, session_count] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchCreateSessionsRequest): + request = spanner.BatchCreateSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if session_count is not None: + request.session_count = session_count + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_create_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. 
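+ # (The ``database`` value is sent in the ``x-goog-request-params``
+ # routing header so the request is routed to the correct database.)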
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_session(self, + request: Optional[Union[spanner.GetSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_get_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]]): + The request object. The request for + [GetSession][google.spanner.v1.Spanner.GetSession]. + name (:class:`str`): + Required. The name of the session to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.GetSessionRequest): + request = spanner.GetSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
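+ # The wrapped method also applies the default retry and timeout
+ # policy configured for GetSession when the transport was created.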
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_sessions(self, + request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSessionsAsyncPager: + r"""Lists all sessions in a given database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_list_sessions(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ListSessionsRequest( + database="database_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]]): + The request object. The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + database (:class:`str`): + Required. The database in which to + list sessions. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager: + The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
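+ # A plain dict is accepted as well; the proto-plus constructor
+ # below coerces it into a ListSessionsRequest.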
+ if not isinstance(request, spanner.ListSessionsRequest): + request = spanner.ListSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSessionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_session(self, + request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Ends a session, releasing server resources associated + with it. This asynchronously triggers the cancellation + of any operations that are running with this session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_delete_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + await client.delete_session(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]]): + The request object. The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + name (:class:`str`): + Required. The name of the session to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
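+ # Passing both ``request`` and ``name`` would be ambiguous, so that
+ # combination is rejected below.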
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.DeleteSessionRequest): + request = spanner.DeleteSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def execute_sql(self, + request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> result_set.ResultSet: + r"""Executes an SQL statement, returning all results in a single + reply. This method can't be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + The query string can be SQL or `Graph Query Language + (GQL) `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_execute_sql(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = await client.execute_sql(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ExecuteSqlRequest): + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.execute_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def execute_streaming_sql(self, + request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: + r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + The query string can be SQL or `Graph Query Language + (GQL) `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_execute_streaming_sql(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + stream = await client.execute_streaming_sql(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ExecuteSqlRequest): + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.execute_streaming_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def execute_batch_dml(self, + request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_execute_batch_dml(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + statements = spanner_v1.Statement() + statements.sql = "sql_value" + + request = spanner_v1.ExecuteBatchDmlRequest( + session="session_value", + statements=statements, + seqno=550, + ) + + # Make the request + response = await client.execute_batch_dml(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]]): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_v1.types.ExecuteBatchDmlResponse:
+ The response for
+ [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml].
+ Contains a list of
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ one for each DML statement that has successfully
+ executed, in the same order as the statements in the
+ request. If a statement fails, the status in the
+ response body identifies the cause of the failure.
+
+ To check for DML statements that failed, use the
+ following approach:
+
+ 1. Check the status in the response message. The
+ [google.rpc.Code][google.rpc.Code] enum value OK
+ indicates that all statements were executed
+ successfully.
+ 2. If the status was not OK, check the number of
+ result sets in the response. If the response
+ contains N [ResultSet][google.spanner.v1.ResultSet]
+ messages, then statement N+1 in the request failed.
+
+ Example 1:
+
+ - Request: 5 DML statements, all executed
+ successfully.
+ - Response: 5
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ with the status OK.
+
+ Example 2:
+
+ - Request: 5 DML statements. The third statement has
+ a syntax error.
+ - Response: 2
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ and a syntax error (INVALID_ARGUMENT) status. The
+ number of [ResultSet][google.spanner.v1.ResultSet]
+ messages indicates that the third statement failed,
+ and the fourth and fifth statements were not
+ executed.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner.ExecuteBatchDmlRequest):
+ request = spanner.ExecuteBatchDmlRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.execute_batch_dml]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("session", request.session),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def read(self,
+ request: Optional[Union[spanner.ReadRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> result_set.ResultSet:
+ r"""Reads rows from the database using key lookups and scans, as a
+ simple key/value style alternative to
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method
+ can't be used to return a result set larger than 10 MiB; if the
+ read matches more data than that, the read fails with a
+ ``FAILED_PRECONDITION`` error.
+
+ Reads inside read-write transactions might return ``ABORTED``.
+ If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + response = await client.read(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_read(self, + request: Optional[Union[spanner.ReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: + r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = await client.streaming_read(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.streaming_read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def begin_transaction(self, + request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, + *, + session: Optional[str] = None, + options: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> transaction.Transaction: + r"""Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_begin_transaction(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.BeginTransactionRequest( + session="session_value", + ) + + # Make the request + response = await client.begin_transaction(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]]): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + session (:class:`str`): + Required. The session in which the + transaction runs. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + options (:class:`google.cloud.spanner_v1.types.TransactionOptions`): + Required. Options for the new + transaction. + + This corresponds to the ``options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Transaction: + A transaction. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, options] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BeginTransactionRequest): + request = spanner.BeginTransactionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if options is not None: + request.options = options + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.begin_transaction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
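+ # (The returned Transaction message carries the server-assigned
+ # transaction ID for use in subsequent reads, queries, and commits.)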
+ return response + + async def commit(self, + request: Optional[Union[spanner.CommitRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + mutations: Optional[MutableSequence[mutation.Mutation]] = None, + single_use_transaction: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> commit_response.CommitResponse: + r"""Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_commit(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.CommitRequest( + transaction_id=b'transaction_id_blob', + session="session_value", + ) + + # Make the request + response = await client.commit(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.CommitRequest, dict]]): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + session (:class:`str`): + Required. The session in which the + transaction to be committed is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (:class:`bytes`): + Commit a previously-started + transaction. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (:class:`MutableSequence[google.cloud.spanner_v1.types.Mutation]`): + The mutations to be executed when + this transaction commits. All mutations + are applied atomically, in the order + they appear in this list. + + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + single_use_transaction (:class:`google.cloud.spanner_v1.types.TransactionOptions`): + Execute mutations in a temporary transaction. Note that + unlike commit of a previously-started transaction, + commit with a temporary transaction is non-idempotent. 
+ That is, if the ``CommitRequest`` is sent to Cloud + Spanner more than once (for instance, due to retries in + the application, or in the transport library), it's + possible that the mutations are executed more than once. + If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + + This corresponds to the ``single_use_transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id, mutations, single_use_transaction] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CommitRequest): + request = spanner.CommitRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + if single_use_transaction is not None: + request.single_use_transaction = single_use_transaction + if mutations: + request.mutations.extend(mutations) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.commit] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rollback(self, + request: Optional[Union[spanner.RollbackRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. 
+ + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_rollback(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + await client.rollback(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.RollbackRequest, dict]]): + The request object. The request for + [Rollback][google.spanner.v1.Spanner.Rollback]. + session (:class:`str`): + Required. The session in which the + transaction to roll back is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (:class:`bytes`): + Required. The transaction to roll + back. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.RollbackRequest): + request = spanner.RollbackRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
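+ # Rollback returns google.protobuf.Empty, so no response is
+ # surfaced to the caller.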
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def partition_query(self, + request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + query operation in parallel. Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + ``PartitionQueryRequest`` used to create the partition tokens + and the ``ExecuteSqlRequests`` that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the query, and the whole operation must be restarted + from the beginning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_partition_query(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionQueryRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = await client.partition_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]]): + The request object. The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.PartitionQueryRequest): + request = spanner.PartitionQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.partition_query] + + # Certain fields should be provided within the metadata header; + # add these here. 
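+ # For this RPC the ``session`` resource name is the routing
+ # parameter.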
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def partition_read(self, + request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = await client.partition_read(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]]): + The request object. The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
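+ # As above, a dict request is coerced into a PartitionReadRequest
+ # by the proto-plus constructor.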
+ if not isinstance(request, spanner.PartitionReadRequest): + request = spanner.PartitionReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.partition_read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_write(self, + request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, + *, + session: Optional[str] = None, + mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[spanner.BatchWriteResponse]]: + r"""Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = await client.batch_write(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]]): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + session (:class:`str`): + Required. The session in which the + batch request is to be run. 
+ + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutation_groups (:class:`MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]`): + Required. The groups of mutations to + be applied. + + This corresponds to the ``mutation_groups`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.BatchWriteResponse]: + The result of applying a batch of + mutations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, mutation_groups] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchWriteRequest): + request = spanner.BatchWriteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if mutation_groups: + request.mutation_groups.extend(mutation_groups) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_write] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
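+        # BatchWrite is a server-streaming RPC, so `rpc` is not awaited here:
+        # the caller awaits the returned object and then iterates the
+        # resulting stream with `async for` (see the sample above).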
+ return response + + async def __aenter__(self) -> "SpannerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "SpannerAsyncClient", +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py new file mode 100644 index 0000000000..bd4ac8451f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py @@ -0,0 +1,2486 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.spanner_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SpannerGrpcTransport +from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport +from .transports.rest import SpannerRestTransport + + +class SpannerClientMeta(type): + 
"""Metaclass for the Spanner client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] + _transport_registry["grpc"] = SpannerGrpcTransport + _transport_registry["grpc_asyncio"] = SpannerGrpcAsyncIOTransport + _transport_registry["rest"] = SpannerRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[SpannerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SpannerClient(metaclass=SpannerClientMeta): + """Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "spanner.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SpannerTransport: + """Returns the transport used by the client instance. + + Returns: + SpannerTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def database_path(project: str,instance: str,database: str,) -> str: + """Returns a fully-qualified database string.""" + return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + + @staticmethod + def parse_database_path(path: str) -> Dict[str,str]: + """Parses a database path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def session_path(project: str,instance: str,database: str,session: str,) -> str: + """Returns a fully-qualified session string.""" + return "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) + + @staticmethod + def parse_session_path(path: str) -> Dict[str,str]: + """Parses a session path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)/sessions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a 
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = SpannerClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = SpannerClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SpannerClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. 
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+
+        # NOTE (b/349488459): universe validation is disabled until further notice.
+        return True
+
+    def _add_cred_info_for_auth_errors(
+        self,
+        error: core_exceptions.GoogleAPICallError
+    ) -> None:
+        """Adds credential info string to error details for 401/403/404 errors.
+
+        Args:
+            error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
+        """
+        if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]:
+            return
+
+        cred = self._transport._credentials
+
+        # get_cred_info is only available in google-auth>=2.35.0
+        if not hasattr(cred, "get_cred_info"):
+            return
+
+        # ignore the type check since pypy test fails when get_cred_info
+        # is not available
+        cred_info = cred.get_cred_info()  # type: ignore
+        if cred_info and hasattr(error._details, "append"):
+            error._details.append(json.dumps(cred_info))
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the spanner client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,SpannerTransport,Callable[..., SpannerTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SpannerTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+ + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = SpannerClient._read_environment_variables() + self._client_cert_source = SpannerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = SpannerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SpannerTransport) + if transport_provided: + # transport is a SpannerTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(SpannerTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + SpannerClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[SpannerTransport], Callable[..., SpannerTransport]] = ( + SpannerClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SpannerTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner_v1.SpannerClient`.", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.spanner.v1.Spanner", + "credentialsType": None, + } + ) + + def create_session(self, + request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_create_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.CreateSessionRequest( + database="database_value", + ) + + # Make the request + response = client.create_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): + The request object. The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + database (str): + Required. The database in which the + new session is created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CreateSessionRequest): + request = spanner.CreateSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_create_sessions(self, + request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + session_count: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.BatchCreateSessionsResponse: + r"""Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_batch_create_sessions(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.BatchCreateSessionsRequest( + database="database_value", + session_count=1420, + ) + + # Make the request + response = client.batch_create_sessions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): + The request object. The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + database (str): + Required. The database in which the + new sessions are created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session_count (int): + Required. The number of sessions to be created in this + batch call. The API can return fewer than the requested + number of sessions. If a specific number of sessions are + desired, the client can make additional calls to + ``BatchCreateSessions`` (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + + This corresponds to the ``session_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.BatchCreateSessionsResponse: + The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, session_count] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchCreateSessionsRequest): + request = spanner.BatchCreateSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if session_count is not None: + request.session_count = session_count + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
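+        # `_wrapped_methods` holds each RPC pre-wrapped with its default
+        # retry and timeout policy, so the DEFAULT sentinels accepted above
+        # resolve to those per-method defaults.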
+ rpc = self._transport._wrapped_methods[self._transport.batch_create_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_session(self, + request: Optional[Union[spanner.GetSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_get_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): + The request object. The request for + [GetSession][google.spanner.v1.Spanner.GetSession]. + name (str): + Required. The name of the session to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.GetSessionRequest): + request = spanner.GetSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
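+        # Flattened arguments (here, `name`) are copied onto the request
+        # only when they were actually supplied; the guard above ensures
+        # they are never mixed with an explicit `request`.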
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_sessions(self, + request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSessionsPager: + r"""Lists all sessions in a given database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_list_sessions(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ListSessionsRequest( + database="database_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): + The request object. The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + database (str): + Required. The database in which to + list sessions. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager: + The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
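+        # As elsewhere, a dict request is coerced into the proto-plus type;
+        # the raw RPC response is later wrapped in a ListSessionsPager that
+        # fetches additional pages transparently during iteration.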
+ if not isinstance(request, spanner.ListSessionsRequest): + request = spanner.ListSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSessionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_session(self, + request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Ends a session, releasing server resources associated + with it. This asynchronously triggers the cancellation + of any operations that are running with this session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_delete_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + client.delete_session(request=request) + + Args: + request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): + The request object. The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + name (str): + Required. The name of the session to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
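+        # Exactly one calling convention is permitted per invocation: either
+        # a full `request` object/dict, or flattened fields such as `name`,
+        # but never both at once.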
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner.DeleteSessionRequest):
+            request = spanner.DeleteSessionRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_session]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def execute_sql(self,
+            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> result_set.ResultSet:
+        r"""Executes an SQL statement, returning all results in a single
+        reply. This method can't be used to return a result set larger
+        than 10 MiB; if the query yields more data than that, the query
+        fails with a ``FAILED_PRECONDITION`` error.
+
+        Operations inside read-write transactions might return
+        ``ABORTED``. If this occurs, the application should restart the
+        transaction from the beginning. See
+        [Transaction][google.spanner.v1.Transaction] for more details.
+
+        Larger result sets can be fetched in streaming fashion by
+        calling
+        [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]
+        instead.
+
+        The query string can be SQL or `Graph Query Language
+        (GQL) <https://cloud.google.com/spanner/docs/reference/standard-sql/graph-intro>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_v1
+
+            def sample_execute_sql():
+                # Create a client
+                client = spanner_v1.SpannerClient()
+
+                # Initialize request argument(s)
+                request = spanner_v1.ExecuteSqlRequest(
+                    session="session_value",
+                    sql="sql_value",
+                )
+
+                # Make the request
+                response = client.execute_sql(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]):
+                The request object. The request for
+                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
+                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_v1.types.ResultSet:
+                Results from [Read][google.spanner.v1.Spanner.Read] or
+                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner.ExecuteSqlRequest):
+            request = spanner.ExecuteSqlRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.execute_sql]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("session", request.session),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def execute_streaming_sql(self,
+            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> Iterable[result_set.PartialResultSet]:
+        r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except
+        returns the result set as a stream. Unlike
+        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no
+        limit on the size of the returned result set. However, no
+        individual row in the result set can exceed 100 MiB, and no
+        column value can exceed 10 MiB.
+
+        The query string can be SQL or `Graph Query Language
+        (GQL) <https://cloud.google.com/spanner/docs/reference/standard-sql/graph-intro>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_v1
+
+            def sample_execute_streaming_sql():
+                # Create a client
+                client = spanner_v1.SpannerClient()
+
+                # Initialize request argument(s)
+                request = spanner_v1.ExecuteSqlRequest(
+                    session="session_value",
+                    sql="sql_value",
+                )
+
+                # Make the request
+                stream = client.execute_streaming_sql(request=request)
+
+                # Handle the response
+                for response in stream:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]):
+                The request object. The request for
+                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
+                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+ + Returns: + Iterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ExecuteSqlRequest): + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_streaming_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def execute_batch_dml(self, + request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_execute_batch_dml(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + statements = spanner_v1.Statement() + statements.sql = "sql_value" + + request = spanner_v1.ExecuteBatchDmlRequest( + session="session_value", + statements=statements, + seqno=550, + ) + + # Make the request + response = client.execute_batch_dml(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: + The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of + [ResultSet][google.spanner.v1.ResultSet] messages, + one for each DML statement that has successfully + executed, in the same order as the statements in the + request. If a statement fails, the status in the + response body identifies the cause of the failure. + + To check for DML statements that failed, use the + following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value OK + indicates that all statements were executed + successfully. 2. If the status was not OK, check the + number of result sets in the response. If the + response contains N + [ResultSet][google.spanner.v1.ResultSet] messages, + then statement N+1 in the request failed. + + Example 1: + + - Request: 5 DML statements, all executed + successfully. + + \* Response: 5 + [ResultSet][google.spanner.v1.ResultSet] messages, + with the status OK. + + Example 2: + + - Request: 5 DML statements. The third statement has + a syntax error. + + \* Response: 2 + [ResultSet][google.spanner.v1.ResultSet] messages, + and a syntax error (INVALID_ARGUMENT) status. The + number of [ResultSet][google.spanner.v1.ResultSet] + messages indicates that the third statement failed, + and the fourth and fifth statements were not + executed. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ExecuteBatchDmlRequest): + request = spanner.ExecuteBatchDmlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_batch_dml] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def read(self, + request: Optional[Union[spanner.ReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> result_set.ResultSet: + r"""Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method + can't be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + response = client.read(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_read(self, + request: Optional[Union[spanner.ReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[result_set.PartialResultSet]: + r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = client.streaming_read(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def begin_transaction(self, + request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, + *, + session: Optional[str] = None, + options: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> transaction.Transaction: + r"""Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_begin_transaction(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.BeginTransactionRequest( + session="session_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + session (str): + Required. The session in which the + transaction runs. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + options (google.cloud.spanner_v1.types.TransactionOptions): + Required. Options for the new + transaction. + + This corresponds to the ``options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Transaction: + A transaction. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, options] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BeginTransactionRequest): + request = spanner.BeginTransactionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if options is not None: + request.options = options + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.begin_transaction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
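+        # Editorial note (illustrative sketch, not part of the generated
+        # surface): the ``id`` on the returned ``Transaction`` message is what
+        # later calls reference. A caller might, for example, commit the
+        # transaction it just began:
+        #
+        #     txn = client.begin_transaction(request=request)
+        #     client.commit(request=spanner_v1.CommitRequest(
+        #         session=request.session,
+        #         transaction_id=txn.id,
+        #     ))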
+ return response + + def commit(self, + request: Optional[Union[spanner.CommitRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + mutations: Optional[MutableSequence[mutation.Mutation]] = None, + single_use_transaction: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> commit_response.CommitResponse: + r"""Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_commit(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.CommitRequest( + transaction_id=b'transaction_id_blob', + session="session_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + session (str): + Required. The session in which the + transaction to be committed is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (bytes): + Commit a previously-started + transaction. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): + The mutations to be executed when + this transaction commits. All mutations + are applied atomically, in the order + they appear in this list. + + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): + Execute mutations in a temporary transaction. Note that + unlike commit of a previously-started transaction, + commit with a temporary transaction is non-idempotent. + That is, if the ``CommitRequest`` is sent to Cloud + Spanner more than once (for instance, due to retries in + the application, or in the transport library), it's + possible that the mutations are executed more than once. 
+ If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + + This corresponds to the ``single_use_transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id, mutations, single_use_transaction] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CommitRequest): + request = spanner.CommitRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + if mutations is not None: + request.mutations = mutations + if single_use_transaction is not None: + request.single_use_transaction = single_use_transaction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def rollback(self, + request: Optional[Union[spanner.RollbackRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_rollback(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + client.rollback(request=request) + + Args: + request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): + The request object. The request for + [Rollback][google.spanner.v1.Spanner.Rollback]. + session (str): + Required. The session in which the + transaction to roll back is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (bytes): + Required. The transaction to roll + back. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.RollbackRequest): + request = spanner.RollbackRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def partition_query(self, + request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + query operation in parallel. 
Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + ``PartitionQueryRequest`` used to create the partition tokens + and the ``ExecuteSqlRequests`` that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the query, and the whole operation must be restarted + from the beginning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_partition_query(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionQueryRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = client.partition_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): + The request object. The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.PartitionQueryRequest): + request = spanner.PartitionQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.partition_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
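+        # Editorial note (illustrative sketch, not part of the generated
+        # surface): each token in ``response.partitions`` is typically passed
+        # back as the ``partition_token`` of an ``ExecuteSqlRequest`` issued
+        # on the same session and read-only transaction, for example:
+        #
+        #     for partition in response.partitions:
+        #         sub_request = spanner_v1.ExecuteSqlRequest(
+        #             session=request.session,
+        #             sql=request.sql,
+        #             transaction=request.transaction,
+        #             partition_token=partition.partition_token,
+        #         )
+        #         for chunk in client.execute_streaming_sql(request=sub_request):
+        #             ...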
+ return response + + def partition_read(self, + request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = client.partition_read(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): + The request object. The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.PartitionReadRequest): + request = spanner.PartitionReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.partition_read] + + # Certain fields should be provided within the metadata header; + # add these here. 
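+        # (Editorial note: ``to_grpc_metadata`` renders these key/value pairs
+        # as the ``x-goog-request-params`` header, which the backend uses to
+        # route the request to the server that owns the named session.)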
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_write(self, + request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, + *, + session: Optional[str] = None, + mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[spanner.BatchWriteResponse]: + r"""Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = client.batch_write(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + session (str): + Required. The session in which the + batch request is to be run. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): + Required. The groups of mutations to + be applied. + + This corresponds to the ``mutation_groups`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]: + The result of applying a batch of + mutations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, mutation_groups] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchWriteRequest): + request = spanner.BatchWriteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if mutation_groups is not None: + request.mutation_groups = mutation_groups + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_write] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SpannerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "SpannerClient", +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py new file mode 100644 index 0000000000..0969af8f6f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_v1.types import spanner + + +class ListSessionsPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner.ListSessionsResponse], + request: spanner.ListSessionsRequest, + response: spanner.ListSessionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.spanner_v1.types.ListSessionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = spanner.ListSessionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner.Session]: + for page in self.pages: + yield from page.sessions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSessionsAsyncPager: + """A pager for iterating through ``list_sessions`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner.ListSessionsResponse]], + request: spanner.ListSessionsRequest, + response: spanner.ListSessionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.spanner_v1.types.ListSessionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = spanner.ListSessionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner.Session]: + async def async_generator(): + async for page in self.pages: + for response in page.sessions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst new file mode 100644 index 0000000000..99997401d5 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SpannerTransport` is the ABC for all transports. +- public child `SpannerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SpannerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSpannerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `SpannerRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py new file mode 100644 index 0000000000..ee0b1ae37f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SpannerTransport +from .grpc import SpannerGrpcTransport +from .grpc_asyncio import SpannerGrpcAsyncIOTransport +from .rest import SpannerRestTransport +from .rest import SpannerRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] +_transport_registry['grpc'] = SpannerGrpcTransport +_transport_registry['grpc_asyncio'] = SpannerGrpcAsyncIOTransport +_transport_registry['rest'] = SpannerRestTransport + +__all__ = ( + 'SpannerTransport', + 'SpannerGrpcTransport', + 'SpannerGrpcAsyncIOTransport', + 'SpannerRestTransport', + 'SpannerRestInterceptor', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py new file mode 100644 index 0000000000..0c7b14364b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -0,0 +1,505 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.spanner_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SpannerTransport(abc.ABC): + """Abstract transport class for Spanner.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
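+        # (Editorial note: the precedence implemented below is an explicit
+        # ``credentials`` object first, then the deprecated ``credentials_file``,
+        # and finally Application Default Credentials via ``google.auth.default``.)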
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_session: gapic_v1.method.wrap_method( + self.create_session, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_create_sessions: gapic_v1.method.wrap_method( + self.batch_create_sessions, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_session: gapic_v1.method.wrap_method( + self.get_session, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method.wrap_method( + self.list_sessions, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_session: gapic_v1.method.wrap_method( + self.delete_session, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_sql: gapic_v1.method.wrap_method( + self.execute_sql, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_streaming_sql: gapic_v1.method.wrap_method( + 
self.execute_streaming_sql, + default_timeout=3600.0, + client_info=client_info, + ), + self.execute_batch_dml: gapic_v1.method.wrap_method( + self.execute_batch_dml, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.read: gapic_v1.method.wrap_method( + self.read, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.streaming_read: gapic_v1.method.wrap_method( + self.streaming_read, + default_timeout=3600.0, + client_info=client_info, + ), + self.begin_transaction: gapic_v1.method.wrap_method( + self.begin_transaction, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.commit: gapic_v1.method.wrap_method( + self.commit, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.rollback: gapic_v1.method.wrap_method( + self.rollback, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_query: gapic_v1.method.wrap_method( + self.partition_query, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_read: gapic_v1.method.wrap_method( + self.partition_read, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_write: gapic_v1.method.wrap_method( + self.batch_write, + default_timeout=3600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
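+
+        Concrete transports implement this by closing the underlying
+        channel (see, for example, the gRPC transports below).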
+ """ + raise NotImplementedError() + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + Union[ + spanner.Session, + Awaitable[spanner.Session] + ]]: + raise NotImplementedError() + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + Union[ + spanner.BatchCreateSessionsResponse, + Awaitable[spanner.BatchCreateSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + Union[ + spanner.Session, + Awaitable[spanner.Session] + ]]: + raise NotImplementedError() + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + Union[ + spanner.ListSessionsResponse, + Awaitable[spanner.ListSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Union[ + result_set.ResultSet, + Awaitable[result_set.ResultSet] + ]]: + raise NotImplementedError() + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Union[ + result_set.PartialResultSet, + Awaitable[result_set.PartialResultSet] + ]]: + raise NotImplementedError() + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + Union[ + spanner.ExecuteBatchDmlResponse, + Awaitable[spanner.ExecuteBatchDmlResponse] + ]]: + raise NotImplementedError() + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + Union[ + result_set.ResultSet, + Awaitable[result_set.ResultSet] + ]]: + raise NotImplementedError() + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + Union[ + result_set.PartialResultSet, + Awaitable[result_set.PartialResultSet] + ]]: + raise NotImplementedError() + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + Union[ + transaction.Transaction, + Awaitable[transaction.Transaction] + ]]: + raise NotImplementedError() + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + Union[ + commit_response.CommitResponse, + Awaitable[commit_response.CommitResponse] + ]]: + raise NotImplementedError() + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + Union[ + spanner.PartitionResponse, + Awaitable[spanner.PartitionResponse] + ]]: + raise NotImplementedError() + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + Union[ + spanner.PartitionResponse, + Awaitable[spanner.PartitionResponse] + ]]: + raise NotImplementedError() + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + Union[ + spanner.BatchWriteResponse, + Awaitable[spanner.BatchWriteResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'SpannerTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py new file mode 100644 index 0000000000..064199f57e --- /dev/null +++ 
b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -0,0 +1,899 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore +from .base import SpannerTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: 
{pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SpannerGrpcTransport(SpannerTransport): + """gRPC backend transport for Spanner. + + Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. 
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + spanner.Session]: + r"""Return a callable for the create session method over gRPC. + + Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + Returns: + Callable[[~.CreateSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
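+        # Stubs are created lazily on first access and cached in
+        # ``self._stubs``, so repeated property lookups reuse the same
+        # multicallable.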
+ if 'create_session' not in self._stubs: + self._stubs['create_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/CreateSession', + request_serializer=spanner.CreateSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['create_session'] + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + spanner.BatchCreateSessionsResponse]: + r"""Return a callable for the batch create sessions method over gRPC. + + Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + Returns: + Callable[[~.BatchCreateSessionsRequest], + ~.BatchCreateSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_sessions' not in self._stubs: + self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BatchCreateSessions', + request_serializer=spanner.BatchCreateSessionsRequest.serialize, + response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, + ) + return self._stubs['batch_create_sessions'] + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + spanner.Session]: + r"""Return a callable for the get session method over gRPC. + + Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + Returns: + Callable[[~.GetSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_session' not in self._stubs: + self._stubs['get_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/GetSession', + request_serializer=spanner.GetSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['get_session'] + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + spanner.ListSessionsResponse]: + r"""Return a callable for the list sessions method over gRPC. + + Lists all sessions in a given database. + + Returns: + Callable[[~.ListSessionsRequest], + ~.ListSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ListSessions', + request_serializer=spanner.ListSessionsRequest.serialize, + response_deserializer=spanner.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete session method over gRPC. + + Ends a session, releasing server resources associated + with it. 
This asynchronously triggers the cancellation + of any operations that are running with this session. + + Returns: + Callable[[~.DeleteSessionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_session' not in self._stubs: + self._stubs['delete_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/DeleteSession', + request_serializer=spanner.DeleteSessionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_session'] + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.ResultSet]: + r"""Return a callable for the execute sql method over gRPC. + + Executes an SQL statement, returning all results in a single + reply. This method can't be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + The query string can be SQL or `Graph Query Language + (GQL) `__. + + Returns: + Callable[[~.ExecuteSqlRequest], + ~.ResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_sql' not in self._stubs: + self._stubs['execute_sql'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['execute_sql'] + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.PartialResultSet]: + r"""Return a callable for the execute streaming sql method over gRPC. + + Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + The query string can be SQL or `Graph Query Language + (GQL) `__. + + Returns: + Callable[[~.ExecuteSqlRequest], + ~.PartialResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
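+        # This is a server-streaming RPC, so the stub is created with
+        # ``unary_stream``: one request in, a stream of
+        # ``PartialResultSet`` messages out.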
+ if 'execute_streaming_sql' not in self._stubs: + self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/ExecuteStreamingSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['execute_streaming_sql'] + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + spanner.ExecuteBatchDmlResponse]: + r"""Return a callable for the execute batch dml method over gRPC. + + Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + Returns: + Callable[[~.ExecuteBatchDmlRequest], + ~.ExecuteBatchDmlResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_batch_dml' not in self._stubs: + self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteBatchDml', + request_serializer=spanner.ExecuteBatchDmlRequest.serialize, + response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, + ) + return self._stubs['execute_batch_dml'] + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + result_set.ResultSet]: + r"""Return a callable for the read method over gRPC. + + Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method + can't be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Returns: + Callable[[~.ReadRequest], + ~.ResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'read' not in self._stubs: + self._stubs['read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Read', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['read'] + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + result_set.PartialResultSet]: + r"""Return a callable for the streaming read method over gRPC. 
+ + Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Returns: + Callable[[~.ReadRequest], + ~.PartialResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'streaming_read' not in self._stubs: + self._stubs['streaming_read'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/StreamingRead', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['streaming_read'] + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + transaction.Transaction]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Returns: + Callable[[~.BeginTransactionRequest], + ~.Transaction]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'begin_transaction' not in self._stubs: + self._stubs['begin_transaction'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BeginTransaction', + request_serializer=spanner.BeginTransactionRequest.serialize, + response_deserializer=transaction.Transaction.deserialize, + ) + return self._stubs['begin_transaction'] + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + commit_response.CommitResponse]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Returns: + Callable[[~.CommitRequest], + ~.CommitResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
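+        # Note: the default retry configured for this method (see
+        # ``_prep_wrapped_messages`` in the base transport) retries
+        # ``RESOURCE_EXHAUSTED`` and ``UNAVAILABLE`` only; ``ABORTED`` is
+        # surfaced to the caller, who is expected to retry the whole
+        # transaction as described above.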
+ if 'commit' not in self._stubs: + self._stubs['commit'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Commit', + request_serializer=spanner.CommitRequest.serialize, + response_deserializer=commit_response.CommitResponse.deserialize, + ) + return self._stubs['commit'] + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + empty_pb2.Empty]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + Returns: + Callable[[~.RollbackRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback' not in self._stubs: + self._stubs['rollback'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Rollback', + request_serializer=spanner.RollbackRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['rollback'] + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + spanner.PartitionResponse]: + r"""Return a callable for the partition query method over gRPC. + + Creates a set of partition tokens that can be used to execute a + query operation in parallel. Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + ``PartitionQueryRequest`` used to create the partition tokens + and the ``ExecuteSqlRequests`` that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionQueryRequest], + ~.PartitionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_query' not in self._stubs: + self._stubs['partition_query'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionQuery', + request_serializer=spanner.PartitionQueryRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_query'] + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + spanner.PartitionResponse]: + r"""Return a callable for the partition read method over gRPC. + + Creates a set of partition tokens that can be used to execute a + read operation in parallel. 
Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionReadRequest], + ~.PartitionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_read' not in self._stubs: + self._stubs['partition_read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionRead', + request_serializer=spanner.PartitionReadRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_read'] + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + spanner.BatchWriteResponse]: + r"""Return a callable for the batch write method over gRPC. + + Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + Returns: + Callable[[~.BatchWriteRequest], + ~.BatchWriteResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
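+        # BatchWrite is also server-streaming: results for each mutation
+        # group are streamed back as the batches are applied, so the stub
+        # is created with ``unary_stream`` rather than ``unary_unary``.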
+ if 'batch_write' not in self._stubs: + self._stubs['batch_write'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/BatchWrite', + request_serializer=spanner.BatchWriteRequest.serialize, + response_deserializer=spanner.BatchWriteResponse.deserialize, + ) + return self._stubs['batch_write'] + + def close(self): + self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'SpannerGrpcTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py new file mode 100644 index 0000000000..3efcf7071f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -0,0 +1,1125 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore +from .base import SpannerTransport, DEFAULT_CLIENT_INFO +from .grpc import SpannerGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + 
} + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SpannerGrpcAsyncIOTransport(SpannerTransport): + """gRPC AsyncIO backend transport for Spanner. + + Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
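+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.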
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + Awaitable[spanner.Session]]: + r"""Return a callable for the create session method over gRPC. + + Creates a new session. 
A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + Returns: + Callable[[~.CreateSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_session' not in self._stubs: + self._stubs['create_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/CreateSession', + request_serializer=spanner.CreateSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['create_session'] + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + Awaitable[spanner.BatchCreateSessionsResponse]]: + r"""Return a callable for the batch create sessions method over gRPC. + + Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + Returns: + Callable[[~.BatchCreateSessionsRequest], + Awaitable[~.BatchCreateSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_sessions' not in self._stubs: + self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BatchCreateSessions', + request_serializer=spanner.BatchCreateSessionsRequest.serialize, + response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, + ) + return self._stubs['batch_create_sessions'] + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + Awaitable[spanner.Session]]: + r"""Return a callable for the get session method over gRPC. + + Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + Returns: + Callable[[~.GetSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
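+        # On the asyncio transport the returned callable produces an
+        # awaitable gRPC call object; awaiting it yields the deserialized
+        # ``Session``.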
+        if 'get_session' not in self._stubs:
+            self._stubs['get_session'] = self._logged_channel.unary_unary(
+                '/google.spanner.v1.Spanner/GetSession',
+                request_serializer=spanner.GetSessionRequest.serialize,
+                response_deserializer=spanner.Session.deserialize,
+            )
+        return self._stubs['get_session']
+
+    @property
+    def list_sessions(self) -> Callable[
+            [spanner.ListSessionsRequest],
+            Awaitable[spanner.ListSessionsResponse]]:
+        r"""Return a callable for the list sessions method over gRPC.
+
+        Lists all sessions in a given database.
+
+        Returns:
+            Callable[[~.ListSessionsRequest],
+                    Awaitable[~.ListSessionsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_sessions' not in self._stubs:
+            self._stubs['list_sessions'] = self._logged_channel.unary_unary(
+                '/google.spanner.v1.Spanner/ListSessions',
+                request_serializer=spanner.ListSessionsRequest.serialize,
+                response_deserializer=spanner.ListSessionsResponse.deserialize,
+            )
+        return self._stubs['list_sessions']
+
+    @property
+    def delete_session(self) -> Callable[
+            [spanner.DeleteSessionRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete session method over gRPC.
+
+        Ends a session, releasing server resources associated
+        with it. This asynchronously triggers the cancellation
+        of any operations that are running with this session.
+
+        Returns:
+            Callable[[~.DeleteSessionRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_session' not in self._stubs:
+            self._stubs['delete_session'] = self._logged_channel.unary_unary(
+                '/google.spanner.v1.Spanner/DeleteSession',
+                request_serializer=spanner.DeleteSessionRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_session']
+
+    @property
+    def execute_sql(self) -> Callable[
+            [spanner.ExecuteSqlRequest],
+            Awaitable[result_set.ResultSet]]:
+        r"""Return a callable for the execute sql method over gRPC.
+
+        Executes an SQL statement, returning all results in a single
+        reply. This method can't be used to return a result set larger
+        than 10 MiB; if the query yields more data than that, the query
+        fails with a ``FAILED_PRECONDITION`` error.
+
+        Operations inside read-write transactions might return
+        ``ABORTED``. If this occurs, the application should restart the
+        transaction from the beginning. See
+        [Transaction][google.spanner.v1.Transaction] for more details.
+
+        Larger result sets can be fetched in streaming fashion by
+        calling
+        [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]
+        instead.
+
+        The query string can be SQL or Graph Query Language (GQL).
+
+        Returns:
+            Callable[[~.ExecuteSqlRequest],
+                    Awaitable[~.ResultSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
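+        # Because the unary form buffers the entire result set (the 10 MiB cap
+        # noted above), it suits small queries. A minimal sketch (hypothetical
+        # session name, awaited inside a running event loop):
+        #
+        #   result = await transport.execute_sql(spanner.ExecuteSqlRequest(
+        #       session="projects/p/instances/i/databases/d/sessions/s",
+        #       sql="SELECT 1"))
+        #   # result.rows carries the data; result.metadata the column types.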
+        if 'execute_sql' not in self._stubs:
+            self._stubs['execute_sql'] = self._logged_channel.unary_unary(
+                '/google.spanner.v1.Spanner/ExecuteSql',
+                request_serializer=spanner.ExecuteSqlRequest.serialize,
+                response_deserializer=result_set.ResultSet.deserialize,
+            )
+        return self._stubs['execute_sql']
+
+    @property
+    def execute_streaming_sql(self) -> Callable[
+            [spanner.ExecuteSqlRequest],
+            Awaitable[result_set.PartialResultSet]]:
+        r"""Return a callable for the execute streaming sql method over gRPC.
+
+        Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except
+        returns the result set as a stream. Unlike
+        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no
+        limit on the size of the returned result set. However, no
+        individual row in the result set can exceed 100 MiB, and no
+        column value can exceed 10 MiB.
+
+        The query string can be SQL or Graph Query Language (GQL).
+
+        Returns:
+            Callable[[~.ExecuteSqlRequest],
+                    Awaitable[~.PartialResultSet]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'execute_streaming_sql' not in self._stubs:
+            self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream(
+                '/google.spanner.v1.Spanner/ExecuteStreamingSql',
+                request_serializer=spanner.ExecuteSqlRequest.serialize,
+                response_deserializer=result_set.PartialResultSet.deserialize,
+            )
+        return self._stubs['execute_streaming_sql']
+
+    @property
+    def execute_batch_dml(self) -> Callable[
+            [spanner.ExecuteBatchDmlRequest],
+            Awaitable[spanner.ExecuteBatchDmlResponse]]:
+        r"""Return a callable for the execute batch dml method over gRPC.
+
+        Executes a batch of SQL DML statements. This method allows many
+        statements to be run with lower latency than submitting them
+        sequentially with
+        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
+
+        Statements are executed in sequential order. A request can
+        succeed even if a statement fails. The
+        [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status]
+        field in the response provides information about the statement
+        that failed. Clients must inspect this field to determine
+        whether an error occurred.
+
+        Execution stops after the first failed statement; the remaining
+        statements are not executed.
+
+        Returns:
+            Callable[[~.ExecuteBatchDmlRequest],
+                    Awaitable[~.ExecuteBatchDmlResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'execute_batch_dml' not in self._stubs:
+            self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary(
+                '/google.spanner.v1.Spanner/ExecuteBatchDml',
+                request_serializer=spanner.ExecuteBatchDmlRequest.serialize,
+                response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize,
+            )
+        return self._stubs['execute_batch_dml']
+
+    @property
+    def read(self) -> Callable[
+            [spanner.ReadRequest],
+            Awaitable[result_set.ResultSet]]:
+        r"""Return a callable for the read method over gRPC.
+
+        Reads rows from the database using key lookups and scans, as a
+        simple key/value style alternative to
+        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
This method + can't be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Returns: + Callable[[~.ReadRequest], + Awaitable[~.ResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'read' not in self._stubs: + self._stubs['read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Read', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['read'] + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + Awaitable[result_set.PartialResultSet]]: + r"""Return a callable for the streaming read method over gRPC. + + Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Returns: + Callable[[~.ReadRequest], + Awaitable[~.PartialResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'streaming_read' not in self._stubs: + self._stubs['streaming_read'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/StreamingRead', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['streaming_read'] + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + Awaitable[transaction.Transaction]]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Returns: + Callable[[~.BeginTransactionRequest], + Awaitable[~.Transaction]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
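+        # An explicit BeginTransaction is mainly useful when the transaction
+        # identifier is needed before the first read or query. A minimal
+        # sketch (hypothetical session name), requesting read-write mode:
+        #
+        #   txn = await transport.begin_transaction(spanner.BeginTransactionRequest(
+        #       session="projects/p/instances/i/databases/d/sessions/s",
+        #       options=transaction.TransactionOptions(
+        #           read_write=transaction.TransactionOptions.ReadWrite())))
+        #   # txn.id can then be attached to subsequent requests.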
+ if 'begin_transaction' not in self._stubs: + self._stubs['begin_transaction'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BeginTransaction', + request_serializer=spanner.BeginTransactionRequest.serialize, + response_deserializer=transaction.Transaction.deserialize, + ) + return self._stubs['begin_transaction'] + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + Awaitable[commit_response.CommitResponse]]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Returns: + Callable[[~.CommitRequest], + Awaitable[~.CommitResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'commit' not in self._stubs: + self._stubs['commit'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Commit', + request_serializer=spanner.CommitRequest.serialize, + response_deserializer=commit_response.CommitResponse.deserialize, + ) + return self._stubs['commit'] + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + Returns: + Callable[[~.RollbackRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback' not in self._stubs: + self._stubs['rollback'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Rollback', + request_serializer=spanner.RollbackRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['rollback'] + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + Awaitable[spanner.PartitionResponse]]: + r"""Return a callable for the partition query method over gRPC. + + Creates a set of partition tokens that can be used to execute a + query operation in parallel. 
Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + ``PartitionQueryRequest`` used to create the partition tokens + and the ``ExecuteSqlRequests`` that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionQueryRequest], + Awaitable[~.PartitionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_query' not in self._stubs: + self._stubs['partition_query'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionQuery', + request_serializer=spanner.PartitionQueryRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_query'] + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + Awaitable[spanner.PartitionResponse]]: + r"""Return a callable for the partition read method over gRPC. + + Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionReadRequest], + Awaitable[~.PartitionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_read' not in self._stubs: + self._stubs['partition_read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionRead', + request_serializer=spanner.PartitionReadRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_read'] + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + Awaitable[spanner.BatchWriteResponse]]: + r"""Return a callable for the batch write method over gRPC. + + Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. 
However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + Returns: + Callable[[~.BatchWriteRequest], + Awaitable[~.BatchWriteResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_write' not in self._stubs: + self._stubs['batch_write'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/BatchWrite', + request_serializer=spanner.BatchWriteRequest.serialize, + response_deserializer=spanner.BatchWriteResponse.deserialize, + ) + return self._stubs['batch_write'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_session: self._wrap_method( + self.create_session, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_create_sessions: self._wrap_method( + self.batch_create_sessions, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_session: self._wrap_method( + self.get_session, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_sessions: self._wrap_method( + self.list_sessions, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_session: self._wrap_method( + self.delete_session, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_sql: self._wrap_method( + self.execute_sql, + default_retry=retries.AsyncRetry( + initial=0.25, 
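+                    # Exponential backoff: the nominal delay starts at 0.25 s
+                    # and is multiplied by 1.3 after each attempt (0.25, 0.325,
+                    # 0.4225, ...), capped at 32.0 s per attempt; retrying
+                    # stops once the 30 s deadline below is exhausted. Actual
+                    # sleeps may be jittered by the retry implementation.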
+ maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_streaming_sql: self._wrap_method( + self.execute_streaming_sql, + default_timeout=3600.0, + client_info=client_info, + ), + self.execute_batch_dml: self._wrap_method( + self.execute_batch_dml, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.read: self._wrap_method( + self.read, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.streaming_read: self._wrap_method( + self.streaming_read, + default_timeout=3600.0, + client_info=client_info, + ), + self.begin_transaction: self._wrap_method( + self.begin_transaction, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.commit: self._wrap_method( + self.commit, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.rollback: self._wrap_method( + self.rollback, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_query: self._wrap_method( + self.partition_query, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_read: self._wrap_method( + self.partition_read, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_write: self._wrap_method( + self.batch_write, + default_timeout=3600.0, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'SpannerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py 
b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py new file mode 100644 index 0000000000..206ddb9999 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py @@ -0,0 +1,2898 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore + + +from .rest_base import _BaseSpannerRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SpannerRestInterceptor: + """Interceptor for Spanner. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SpannerRestTransport. + + .. 
code-block:: python + class MyCustomSpannerInterceptor(SpannerRestInterceptor): + def pre_batch_create_sessions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_sessions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_write(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_write(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_begin_transaction(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_begin_transaction(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_commit(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_commit(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_session(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_execute_batch_dml(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_batch_dml(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_execute_sql(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_sql(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_execute_streaming_sql(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_execute_streaming_sql(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_session(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_session(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_sessions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sessions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_partition_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_partition_query(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_partition_read(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_partition_read(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_read(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_read(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rollback(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_streaming_read(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_streaming_read(self, 
response): + logging.log(f"Received response: {response}") + return response + + transport = SpannerRestTransport(interceptor=MyCustomSpannerInterceptor()) + client = SpannerClient(transport=transport) + + + """ + def pre_batch_create_sessions(self, request: spanner.BatchCreateSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for batch_create_sessions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_batch_create_sessions(self, response: spanner.BatchCreateSessionsResponse) -> spanner.BatchCreateSessionsResponse: + """Post-rpc interceptor for batch_create_sessions + + DEPRECATED. Please use the `post_batch_create_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_batch_create_sessions` interceptor runs + before the `post_batch_create_sessions_with_metadata` interceptor. + """ + return response + + def post_batch_create_sessions_with_metadata(self, response: spanner.BatchCreateSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_batch_create_sessions_with_metadata` + interceptor in new development instead of the `post_batch_create_sessions` interceptor. + When both interceptors are used, this `post_batch_create_sessions_with_metadata` interceptor runs after the + `post_batch_create_sessions` interceptor. The (possibly modified) response returned by + `post_batch_create_sessions` will be passed to + `post_batch_create_sessions_with_metadata`. + """ + return response, metadata + + def pre_batch_write(self, request: spanner.BatchWriteRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchWriteRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for batch_write + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_batch_write(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for batch_write + + DEPRECATED. Please use the `post_batch_write_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_batch_write` interceptor runs + before the `post_batch_write_with_metadata` interceptor. + """ + return response + + def post_batch_write_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_write + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. 
+ + We recommend only using this `post_batch_write_with_metadata` + interceptor in new development instead of the `post_batch_write` interceptor. + When both interceptors are used, this `post_batch_write_with_metadata` interceptor runs after the + `post_batch_write` interceptor. The (possibly modified) response returned by + `post_batch_write` will be passed to + `post_batch_write_with_metadata`. + """ + return response, metadata + + def pre_begin_transaction(self, request: spanner.BeginTransactionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for begin_transaction + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_begin_transaction(self, response: transaction.Transaction) -> transaction.Transaction: + """Post-rpc interceptor for begin_transaction + + DEPRECATED. Please use the `post_begin_transaction_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_begin_transaction` interceptor runs + before the `post_begin_transaction_with_metadata` interceptor. + """ + return response + + def post_begin_transaction_with_metadata(self, response: transaction.Transaction, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transaction.Transaction, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for begin_transaction + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_begin_transaction_with_metadata` + interceptor in new development instead of the `post_begin_transaction` interceptor. + When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the + `post_begin_transaction` interceptor. The (possibly modified) response returned by + `post_begin_transaction` will be passed to + `post_begin_transaction_with_metadata`. + """ + return response, metadata + + def pre_commit(self, request: spanner.CommitRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for commit + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_commit(self, response: commit_response.CommitResponse) -> commit_response.CommitResponse: + """Post-rpc interceptor for commit + + DEPRECATED. Please use the `post_commit_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_commit` interceptor runs + before the `post_commit_with_metadata` interceptor. + """ + return response + + def post_commit_with_metadata(self, response: commit_response.CommitResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[commit_response.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for commit + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. 
+ + We recommend only using this `post_commit_with_metadata` + interceptor in new development instead of the `post_commit` interceptor. + When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the + `post_commit` interceptor. The (possibly modified) response returned by + `post_commit` will be passed to + `post_commit_with_metadata`. + """ + return response, metadata + + def pre_create_session(self, request: spanner.CreateSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CreateSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_create_session(self, response: spanner.Session) -> spanner.Session: + """Post-rpc interceptor for create_session + + DEPRECATED. Please use the `post_create_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_create_session` interceptor runs + before the `post_create_session_with_metadata` interceptor. + """ + return response + + def post_create_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_create_session_with_metadata` + interceptor in new development instead of the `post_create_session` interceptor. + When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the + `post_create_session` interceptor. The (possibly modified) response returned by + `post_create_session` will be passed to + `post_create_session_with_metadata`. + """ + return response, metadata + + def pre_delete_session(self, request: spanner.DeleteSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.DeleteSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def pre_execute_batch_dml(self, request: spanner.ExecuteBatchDmlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for execute_batch_dml + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_execute_batch_dml(self, response: spanner.ExecuteBatchDmlResponse) -> spanner.ExecuteBatchDmlResponse: + """Post-rpc interceptor for execute_batch_dml + + DEPRECATED. Please use the `post_execute_batch_dml_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_execute_batch_dml` interceptor runs + before the `post_execute_batch_dml_with_metadata` interceptor. 
+ """ + return response + + def post_execute_batch_dml_with_metadata(self, response: spanner.ExecuteBatchDmlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_batch_dml + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_execute_batch_dml_with_metadata` + interceptor in new development instead of the `post_execute_batch_dml` interceptor. + When both interceptors are used, this `post_execute_batch_dml_with_metadata` interceptor runs after the + `post_execute_batch_dml` interceptor. The (possibly modified) response returned by + `post_execute_batch_dml` will be passed to + `post_execute_batch_dml_with_metadata`. + """ + return response, metadata + + def pre_execute_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for execute_sql + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_execute_sql(self, response: result_set.ResultSet) -> result_set.ResultSet: + """Post-rpc interceptor for execute_sql + + DEPRECATED. Please use the `post_execute_sql_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_execute_sql` interceptor runs + before the `post_execute_sql_with_metadata` interceptor. + """ + return response + + def post_execute_sql_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_sql + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_execute_sql_with_metadata` + interceptor in new development instead of the `post_execute_sql` interceptor. + When both interceptors are used, this `post_execute_sql_with_metadata` interceptor runs after the + `post_execute_sql` interceptor. The (possibly modified) response returned by + `post_execute_sql` will be passed to + `post_execute_sql_with_metadata`. + """ + return response, metadata + + def pre_execute_streaming_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for execute_streaming_sql + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_execute_streaming_sql(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for execute_streaming_sql + + DEPRECATED. Please use the `post_execute_streaming_sql_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. 
This `post_execute_streaming_sql` interceptor runs + before the `post_execute_streaming_sql_with_metadata` interceptor. + """ + return response + + def post_execute_streaming_sql_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_streaming_sql + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_execute_streaming_sql_with_metadata` + interceptor in new development instead of the `post_execute_streaming_sql` interceptor. + When both interceptors are used, this `post_execute_streaming_sql_with_metadata` interceptor runs after the + `post_execute_streaming_sql` interceptor. The (possibly modified) response returned by + `post_execute_streaming_sql` will be passed to + `post_execute_streaming_sql_with_metadata`. + """ + return response, metadata + + def pre_get_session(self, request: spanner.GetSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.GetSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_get_session(self, response: spanner.Session) -> spanner.Session: + """Post-rpc interceptor for get_session + + DEPRECATED. Please use the `post_get_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_get_session` interceptor runs + before the `post_get_session_with_metadata` interceptor. + """ + return response + + def post_get_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_get_session_with_metadata` + interceptor in new development instead of the `post_get_session` interceptor. + When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the + `post_get_session` interceptor. The (possibly modified) response returned by + `post_get_session` will be passed to + `post_get_session_with_metadata`. + """ + return response, metadata + + def pre_list_sessions(self, request: spanner.ListSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_sessions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_list_sessions(self, response: spanner.ListSessionsResponse) -> spanner.ListSessionsResponse: + """Post-rpc interceptor for list_sessions + + DEPRECATED. Please use the `post_list_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. 
This `post_list_sessions` interceptor runs + before the `post_list_sessions_with_metadata` interceptor. + """ + return response + + def post_list_sessions_with_metadata(self, response: spanner.ListSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_list_sessions_with_metadata` + interceptor in new development instead of the `post_list_sessions` interceptor. + When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the + `post_list_sessions` interceptor. The (possibly modified) response returned by + `post_list_sessions` will be passed to + `post_list_sessions_with_metadata`. + """ + return response, metadata + + def pre_partition_query(self, request: spanner.PartitionQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for partition_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_partition_query(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: + """Post-rpc interceptor for partition_query + + DEPRECATED. Please use the `post_partition_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_partition_query` interceptor runs + before the `post_partition_query_with_metadata` interceptor. + """ + return response + + def post_partition_query_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for partition_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_partition_query_with_metadata` + interceptor in new development instead of the `post_partition_query` interceptor. + When both interceptors are used, this `post_partition_query_with_metadata` interceptor runs after the + `post_partition_query` interceptor. The (possibly modified) response returned by + `post_partition_query` will be passed to + `post_partition_query_with_metadata`. + """ + return response, metadata + + def pre_partition_read(self, request: spanner.PartitionReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for partition_read + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_partition_read(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: + """Post-rpc interceptor for partition_read + + DEPRECATED. Please use the `post_partition_read_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_partition_read` interceptor runs + before the `post_partition_read_with_metadata` interceptor. + """ + return response + + def post_partition_read_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for partition_read + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_partition_read_with_metadata` + interceptor in new development instead of the `post_partition_read` interceptor. + When both interceptors are used, this `post_partition_read_with_metadata` interceptor runs after the + `post_partition_read` interceptor. The (possibly modified) response returned by + `post_partition_read` will be passed to + `post_partition_read_with_metadata`. + """ + return response, metadata + + def pre_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for read + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_read(self, response: result_set.ResultSet) -> result_set.ResultSet: + """Post-rpc interceptor for read + + DEPRECATED. Please use the `post_read_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_read` interceptor runs + before the `post_read_with_metadata` interceptor. + """ + return response + + def post_read_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for read + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_read_with_metadata` + interceptor in new development instead of the `post_read` interceptor. + When both interceptors are used, this `post_read_with_metadata` interceptor runs after the + `post_read` interceptor. The (possibly modified) response returned by + `post_read` will be passed to + `post_read_with_metadata`. + """ + return response, metadata + + def pre_rollback(self, request: spanner.RollbackRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for rollback + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def pre_streaming_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for streaming_read + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. 
+ """ + return request, metadata + + def post_streaming_read(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for streaming_read + + DEPRECATED. Please use the `post_streaming_read_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_streaming_read` interceptor runs + before the `post_streaming_read_with_metadata` interceptor. + """ + return response + + def post_streaming_read_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for streaming_read + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_streaming_read_with_metadata` + interceptor in new development instead of the `post_streaming_read` interceptor. + When both interceptors are used, this `post_streaming_read_with_metadata` interceptor runs after the + `post_streaming_read` interceptor. The (possibly modified) response returned by + `post_streaming_read` will be passed to + `post_streaming_read_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class SpannerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SpannerRestInterceptor + + +class SpannerRestTransport(_BaseSpannerRestTransport): + """REST backend synchronous transport for Spanner. + + Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[SpannerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SpannerRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchCreateSessions(_BaseSpannerRestTransport._BaseBatchCreateSessions, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.BatchCreateSessions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.BatchCreateSessionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.BatchCreateSessionsResponse: + r"""Call the batch create sessions method over HTTP. + + Args: + request (~.spanner.BatchCreateSessionsRequest): + The request object. The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.BatchCreateSessionsResponse: + The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. 
+ + """ + + http_options = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_http_options() + + request, metadata = self._interceptor.pre_batch_create_sessions(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.BatchCreateSessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchCreateSessions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._BatchCreateSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.BatchCreateSessionsResponse() + pb_resp = spanner.BatchCreateSessionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_batch_create_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_sessions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.BatchCreateSessionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.batch_create_sessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchCreateSessions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BatchWrite(_BaseSpannerRestTransport._BaseBatchWrite, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.BatchWrite") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__(self, + request: spanner.BatchWriteRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the batch write method over HTTP. + + Args: + request (~.spanner.BatchWriteRequest): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.BatchWriteResponse: + The result of applying a batch of + mutations. + + """ + + http_options = _BaseSpannerRestTransport._BaseBatchWrite._get_http_options() + + request, metadata = self._interceptor.pre_batch_write(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseBatchWrite._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseBatchWrite._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseBatchWrite._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.BatchWrite", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchWrite", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._BatchWrite._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
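+            # For example (an illustrative sketch, not generated behavior):
+            # core_exceptions.from_http_response turns a 404 into
+            # core_exceptions.NotFound, which a caller could handle as:
+            #
+            #     try:
+            #         for item in client.batch_write(request=request):
+            #             ...
+            #     except core_exceptions.NotFound:
+            #         ...  # e.g. the session named in the request is gone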
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, spanner.BatchWriteResponse) + + resp = self._interceptor.post_batch_write(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_write_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.batch_write", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchWrite", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BeginTransaction(_BaseSpannerRestTransport._BaseBeginTransaction, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.BeginTransaction") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.BeginTransactionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> transaction.Transaction: + r"""Call the begin transaction method over HTTP. + + Args: + request (~.spanner.BeginTransactionRequest): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.transaction.Transaction: + A transaction. 
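+
+                For illustration, a request that begins a read-write
+                transaction might be built like this sketch (``session`` is a
+                placeholder for an existing session):
+
+                    request = spanner.BeginTransactionRequest(
+                        session=session.name,
+                        options=transaction.TransactionOptions(read_write={}),
+                    )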
+ """ + + http_options = _BaseSpannerRestTransport._BaseBeginTransaction._get_http_options() + + request, metadata = self._interceptor.pre_begin_transaction(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseBeginTransaction._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseBeginTransaction._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseBeginTransaction._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.BeginTransaction", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BeginTransaction", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._BeginTransaction._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transaction.Transaction() + pb_resp = transaction.Transaction.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_begin_transaction(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_begin_transaction_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = transaction.Transaction.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.begin_transaction", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BeginTransaction", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Commit(_BaseSpannerRestTransport._BaseCommit, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.Commit") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.CommitRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> commit_response.CommitResponse: + r"""Call the commit method over HTTP. 
+ + Args: + request (~.spanner.CommitRequest): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.commit_response.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + + http_options = _BaseSpannerRestTransport._BaseCommit._get_http_options() + + request, metadata = self._interceptor.pre_commit(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseCommit._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseCommit._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseCommit._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.Commit", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Commit", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._Commit._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
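+            # Illustrative note (not generated behavior): an aborted Spanner
+            # transaction surfaces over REST as HTTP 409, which
+            # from_http_response raises as core_exceptions.Conflict; callers
+            # typically retry the whole transaction from BeginTransaction on.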
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = commit_response.CommitResponse() + pb_resp = commit_response.CommitResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_commit(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_commit_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = commit_response.CommitResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.commit", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Commit", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSession(_BaseSpannerRestTransport._BaseCreateSession, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.CreateSession") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.CreateSessionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.Session: + r"""Call the create session method over HTTP. + + Args: + request (~.spanner.CreateSessionRequest): + The request object. The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.Session: + A session in the Cloud Spanner API. 
+ """ + + http_options = _BaseSpannerRestTransport._BaseCreateSession._get_http_options() + + request, metadata = self._interceptor.pre_create_session(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseCreateSession._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseCreateSession._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseCreateSession._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.CreateSession", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "CreateSession", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._CreateSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.Session() + pb_resp = spanner.Session.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.Session.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.create_session", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "CreateSession", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteSession(_BaseSpannerRestTransport._BaseDeleteSession, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.DeleteSession") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner.DeleteSessionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete session method over HTTP. + + Args: + request (~.spanner.DeleteSessionRequest): + The request object. 
The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseSpannerRestTransport._BaseDeleteSession._get_http_options() + + request, metadata = self._interceptor.pre_delete_session(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseDeleteSession._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseDeleteSession._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.DeleteSession", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "DeleteSession", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._DeleteSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExecuteBatchDml(_BaseSpannerRestTransport._BaseExecuteBatchDml, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ExecuteBatchDml") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.ExecuteBatchDmlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Call the execute batch dml method over HTTP. + + Args: + request (~.spanner.ExecuteBatchDmlRequest): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
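+
+            A minimal sketch of such a request (assuming an existing
+            ``session`` and transaction id ``txn.id``; the SQL text is a
+            placeholder):
+
+                request = spanner.ExecuteBatchDmlRequest(
+                    session=session.name,
+                    transaction=spanner.TransactionSelector(id=txn.id),
+                    statements=[
+                        spanner.ExecuteBatchDmlRequest.Statement(
+                            sql="UPDATE users SET active = TRUE WHERE id = 1",
+                        ),
+                    ],
+                    seqno=1,
+                )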
+ + Returns: + ~.spanner.ExecuteBatchDmlResponse: + The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of + [ResultSet][google.spanner.v1.ResultSet] messages, one + for each DML statement that has successfully executed, + in the same order as the statements in the request. If a + statement fails, the status in the response body + identifies the cause of the failure. + + To check for DML statements that failed, use the + following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value ``OK`` + indicates that all statements were executed + successfully. + 2. If the status was not ``OK``, check the number of + result sets in the response. If the response contains + ``N`` [ResultSet][google.spanner.v1.ResultSet] + messages, then statement ``N+1`` in the request + failed. + + Example 1: + + - Request: 5 DML statements, all executed successfully. + - Response: 5 [ResultSet][google.spanner.v1.ResultSet] + messages, with the status ``OK``. + + Example 2: + + - Request: 5 DML statements. The third statement has a + syntax error. + - Response: 2 [ResultSet][google.spanner.v1.ResultSet] + messages, and a syntax error (``INVALID_ARGUMENT``) + status. The number of + [ResultSet][google.spanner.v1.ResultSet] messages + indicates that the third statement failed, and the + fourth and fifth statements were not executed. + + """ + + http_options = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_http_options() + + request, metadata = self._interceptor.pre_execute_batch_dml(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ExecuteBatchDml", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteBatchDml", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ExecuteBatchDml._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
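+            # The failure check described in the docstring above, as a sketch
+            # against the parsed response (`resp` is built below; status code
+            # 0 is google.rpc OK):
+            #
+            #     if resp.status.code != 0:
+            #         # 0-based index of the statement that failed:
+            #         failed_index = len(resp.result_sets)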
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.ExecuteBatchDmlResponse() + pb_resp = spanner.ExecuteBatchDmlResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_execute_batch_dml(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_batch_dml_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.ExecuteBatchDmlResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.execute_batch_dml", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteBatchDml", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExecuteSql(_BaseSpannerRestTransport._BaseExecuteSql, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ExecuteSql") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.ExecuteSqlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> result_set.ResultSet: + r"""Call the execute sql method over HTTP. + + Args: + request (~.spanner.ExecuteSqlRequest): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
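+
+                A sketch of consuming a parsed result (``result`` stands in
+                for the returned ResultSet):
+
+                    names = [f.name for f in result.metadata.row_type.fields]
+                    for row in result.rows:
+                        print(dict(zip(names, row)))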
+ + """ + + http_options = _BaseSpannerRestTransport._BaseExecuteSql._get_http_options() + + request, metadata = self._interceptor.pre_execute_sql(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseExecuteSql._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseExecuteSql._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseExecuteSql._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ExecuteSql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteSql", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ExecuteSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = result_set.ResultSet() + pb_resp = result_set.ResultSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_execute_sql(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_sql_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = result_set.ResultSet.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.execute_sql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteSql", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExecuteStreamingSql(_BaseSpannerRestTransport._BaseExecuteStreamingSql, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ExecuteStreamingSql") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__(self, + request: spanner.ExecuteSqlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the execute streaming sql method over HTTP. 
+ + Args: + request (~.spanner.ExecuteSqlRequest): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.PartialResultSet: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + + http_options = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_http_options() + + request, metadata = self._interceptor.pre_execute_streaming_sql(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ExecuteStreamingSql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteStreamingSql", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ExecuteStreamingSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
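+            # Illustrative note (not generated behavior): the request above is
+            # sent with stream=True, so this status check only catches errors
+            # returned before the body starts streaming; failures mid-stream
+            # surface while iterating the ResponseIterator built below.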
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) + + resp = self._interceptor.post_execute_streaming_sql(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_streaming_sql_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.execute_streaming_sql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteStreamingSql", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSession(_BaseSpannerRestTransport._BaseGetSession, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.GetSession") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner.GetSessionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.Session: + r"""Call the get session method over HTTP. + + Args: + request (~.spanner.GetSessionRequest): + The request object. The request for + [GetSession][google.spanner.v1.Spanner.GetSession]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.Session: + A session in the Cloud Spanner API. 
+ """ + + http_options = _BaseSpannerRestTransport._BaseGetSession._get_http_options() + + request, metadata = self._interceptor.pre_get_session(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseGetSession._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseGetSession._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.GetSession", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "GetSession", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._GetSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.Session() + pb_resp = spanner.Session.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.Session.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.get_session", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "GetSession", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSessions(_BaseSpannerRestTransport._BaseListSessions, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ListSessions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner.ListSessionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.ListSessionsResponse: + r"""Call the list sessions method over HTTP. + + Args: + request (~.spanner.ListSessionsRequest): + The request object. The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.ListSessionsResponse: + The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + """ + + http_options = _BaseSpannerRestTransport._BaseListSessions._get_http_options() + + request, metadata = self._interceptor.pre_list_sessions(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseListSessions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseListSessions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ListSessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ListSessions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ListSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
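+            # Illustrative note (not generated behavior): this transport
+            # method returns a single page; the ListSessionsResponse parsed
+            # below carries `next_page_token`, and page-by-page iteration is
+            # layered on top by the generated client's pager.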
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.ListSessionsResponse() + pb_resp = spanner.ListSessionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sessions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.ListSessionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.list_sessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ListSessions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _PartitionQuery(_BaseSpannerRestTransport._BasePartitionQuery, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.PartitionQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.PartitionQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.PartitionResponse: + r"""Call the partition query method over HTTP. + + Args: + request (~.spanner.PartitionQueryRequest): + The request object. The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
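+
+            A minimal sketch of such a request (assuming an existing
+            ``session`` and a read-only transaction id ``txn.id`` suitable for
+            partitioning; the SQL text is a placeholder):
+
+                request = spanner.PartitionQueryRequest(
+                    session=session.name,
+                    transaction=spanner.TransactionSelector(id=txn.id),
+                    sql="SELECT id FROM users",
+                )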
+ + Returns: + ~.spanner.PartitionResponse: + The response for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + + http_options = _BaseSpannerRestTransport._BasePartitionQuery._get_http_options() + + request, metadata = self._interceptor.pre_partition_query(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BasePartitionQuery._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BasePartitionQuery._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BasePartitionQuery._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.PartitionQuery", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._PartitionQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.PartitionResponse() + pb_resp = spanner.PartitionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_partition_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_partition_query_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.PartitionResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.partition_query", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _PartitionRead(_BaseSpannerRestTransport._BasePartitionRead, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.PartitionRead") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.PartitionReadRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.PartitionResponse: + r"""Call the partition read method over HTTP. + + Args: + request (~.spanner.PartitionReadRequest): + The request object. The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.PartitionResponse: + The response for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + + http_options = _BaseSpannerRestTransport._BasePartitionRead._get_http_options() + + request, metadata = self._interceptor.pre_partition_read(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BasePartitionRead._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BasePartitionRead._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BasePartitionRead._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.PartitionRead", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionRead", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._PartitionRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.PartitionResponse() + pb_resp = spanner.PartitionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_partition_read(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_partition_read_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.PartitionResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.partition_read", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionRead", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Read(_BaseSpannerRestTransport._BaseRead, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.Read") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.ReadRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> result_set.ResultSet: + r"""Call the read method over HTTP. + + Args: + request (~.spanner.ReadRequest): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
+
+            """
+
+            http_options = _BaseSpannerRestTransport._BaseRead._get_http_options()
+
+            request, metadata = self._interceptor.pre_read(request, metadata)
+            transcoded_request = _BaseSpannerRestTransport._BaseRead._get_transcoded_request(http_options, request)
+
+            body = _BaseSpannerRestTransport._BaseRead._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseSpannerRestTransport._BaseRead._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.spanner_v1.SpannerClient.Read",
+                    extra = {
+                        "serviceName": "google.spanner.v1.Spanner",
+                        "rpcName": "Read",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = SpannerRestTransport._Read._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = result_set.ResultSet()
+            pb_resp = result_set.ResultSet.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_read(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_read_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = result_set.ResultSet.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner_v1.SpannerClient.read",
+                    extra = {
+                        "serviceName": "google.spanner.v1.Spanner",
+                        "rpcName": "Read",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _Rollback(_BaseSpannerRestTransport._BaseRollback, SpannerRestStub):
+        def __hash__(self):
+            return hash("SpannerRestTransport.Rollback")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+            return response
+
+        def __call__(self,
+                request: spanner.RollbackRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+            ):
+            r"""Call the rollback method over HTTP.
+
+            Args:
+                request (~.spanner.RollbackRequest):
+                    The request object. The request for
+                    [Rollback][google.spanner.v1.Spanner.Rollback].
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseSpannerRestTransport._BaseRollback._get_http_options() + + request, metadata = self._interceptor.pre_rollback(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseRollback._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseRollback._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseRollback._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.Rollback", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Rollback", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._Rollback._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _StreamingRead(_BaseSpannerRestTransport._BaseStreamingRead, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.StreamingRead") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__(self, + request: spanner.ReadRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the streaming read method over HTTP. + + Args: + request (~.spanner.ReadRequest): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.PartialResultSet: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + + http_options = _BaseSpannerRestTransport._BaseStreamingRead._get_http_options() + + request, metadata = self._interceptor.pre_streaming_read(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseStreamingRead._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseStreamingRead._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseStreamingRead._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.StreamingRead", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "StreamingRead", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._StreamingRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) + + resp = self._interceptor.post_streaming_read(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_streaming_read_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.streaming_read", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "StreamingRead", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + spanner.BatchCreateSessionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateSessions(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + spanner.BatchWriteResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + transaction.Transaction]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + commit_response.CommitResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Commit(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + spanner.Session]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + spanner.ExecuteBatchDmlResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteBatchDml(self._session, self._host, self._interceptor) # type: ignore + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.ResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteSql(self._session, self._host, self._interceptor) # type: ignore + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.PartialResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteStreamingSql(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + spanner.Session]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + spanner.ListSessionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSessions(self._session, self._host, self._interceptor) # type: ignore + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + spanner.PartitionResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PartitionQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + spanner.PartitionResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PartitionRead(self._session, self._host, self._interceptor) # type: ignore + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + result_set.ResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Read(self._session, self._host, self._interceptor) # type: ignore + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Rollback(self._session, self._host, self._interceptor) # type: ignore + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + result_set.PartialResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamingRead(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'SpannerRestTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py new file mode 100644 index 0000000000..79eb05d0cf --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py @@ -0,0 +1,817 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import SpannerTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore + + +class _BaseSpannerRestTransport(SpannerTransport): + """Base REST backend transport for Spanner. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
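+
+    Each RPC below has a nested ``_Base<Rpc>`` helper class bundling its
+    HTTP options, request transcoding, JSON request body, and query-string
+    handling. Editorial sketch (using helpers defined in this file) of how
+    the sync transport composes them for a single RPC::
+
+        http_options = _BaseSpannerRestTransport._BaseCommit._get_http_options()
+        transcoded = _BaseSpannerRestTransport._BaseCommit._get_transcoded_request(http_options, request)
+        body = _BaseSpannerRestTransport._BaseCommit._get_request_body_json(transcoded)
+        query_params = _BaseSpannerRestTransport._BaseCommit._get_query_params_json(transcoded)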
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseBatchCreateSessions:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = spanner.BatchCreateSessionsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseSpannerRestTransport._BaseBatchCreateSessions._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseBatchWrite:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.BatchWriteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseBatchWrite._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBeginTransaction: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.BeginTransactionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseBeginTransaction._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCommit: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.CommitRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + 
use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseCommit._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSession: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.CreateSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseCreateSession._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSession: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.DeleteSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseDeleteSession._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExecuteBatchDml: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeBatchDml', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ExecuteBatchDmlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseExecuteBatchDml._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExecuteSql: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeSql', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ExecuteSqlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseExecuteSql._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExecuteStreamingSql: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeStreamingSql', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ExecuteSqlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + 
transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseExecuteStreamingSql._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSession: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.GetSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseGetSession._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSessions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ListSessionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseListSessions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePartitionQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionQuery', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.PartitionQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BasePartitionQuery._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePartitionRead: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionRead', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.PartitionReadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BasePartitionRead._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRead: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:read', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ReadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseRead._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRollback: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + 
return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.RollbackRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseRollback._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseStreamingRead: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:streamingRead', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ReadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseStreamingRead._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseSpannerRestTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py new file mode 100644 index 0000000000..d4c6938ee1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .change_stream import ( + ChangeStreamRecord, +) +from .commit_response import ( + CommitResponse, +) +from .keys import ( + KeyRange, + KeySet, +) +from .mutation import ( + Mutation, +) +from .query_plan import ( + PlanNode, + QueryPlan, +) +from .result_set import ( + PartialResultSet, + ResultSet, + ResultSetMetadata, + ResultSetStats, +) +from .spanner import ( + BatchCreateSessionsRequest, + BatchCreateSessionsResponse, + BatchWriteRequest, + BatchWriteResponse, + BeginTransactionRequest, + CommitRequest, + CreateSessionRequest, + DeleteSessionRequest, + DirectedReadOptions, + ExecuteBatchDmlRequest, + ExecuteBatchDmlResponse, + ExecuteSqlRequest, + GetSessionRequest, + ListSessionsRequest, + ListSessionsResponse, + Partition, + PartitionOptions, + PartitionQueryRequest, + PartitionReadRequest, + PartitionResponse, + ReadRequest, + RequestOptions, + RollbackRequest, + Session, +) +from .transaction import ( + MultiplexedSessionPrecommitToken, + Transaction, + TransactionOptions, + TransactionSelector, +) +from .type import ( + StructType, + Type, + TypeAnnotationCode, + TypeCode, +) + +__all__ = ( + 'ChangeStreamRecord', + 'CommitResponse', + 'KeyRange', + 'KeySet', + 'Mutation', + 'PlanNode', + 'QueryPlan', + 'PartialResultSet', + 'ResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 'BatchWriteRequest', + 'BatchWriteResponse', + 'BeginTransactionRequest', + 'CommitRequest', + 'CreateSessionRequest', + 'DeleteSessionRequest', + 'DirectedReadOptions', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'ExecuteSqlRequest', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'Partition', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'PartitionResponse', + 'ReadRequest', + 'RequestOptions', + 'RollbackRequest', + 'Session', + 'MultiplexedSessionPrecommitToken', + 'Transaction', + 'TransactionOptions', + 'TransactionSelector', + 'StructType', + 'Type', + 'TypeAnnotationCode', + 'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py new file mode 100644 index 0000000000..8db7f85bb5 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py @@ -0,0 +1,683 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.spanner_v1.types import type as gs_type
+from google.protobuf import struct_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.spanner.v1',
+    manifest={
+        'ChangeStreamRecord',
+    },
+)
+
+
+class ChangeStreamRecord(proto.Message):
+    r"""Spanner Change Streams enable customers to capture and stream out
+    changes to their Spanner databases in real-time. A change stream can
+    be created with option partition_mode='IMMUTABLE_KEY_RANGE' or
+    partition_mode='MUTABLE_KEY_RANGE'.
+
+    This message is only used in Change Streams created with the option
+    partition_mode='MUTABLE_KEY_RANGE'. Spanner automatically creates a
+    special Table-Valued Function (TVF) along with each change stream.
+    The function provides access to the change stream's records. The
+    function is named READ\_<change_stream_name> (where
+    <change_stream_name> is the name of the change stream), and it
+    returns a table with only one column called ChangeRecord.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        data_change_record (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord):
+            Data change record describing a data change
+            for a change stream partition.
+
+            This field is a member of `oneof`_ ``record``.
+        heartbeat_record (google.cloud.spanner_v1.types.ChangeStreamRecord.HeartbeatRecord):
+            Heartbeat record describing a heartbeat for a
+            change stream partition.
+
+            This field is a member of `oneof`_ ``record``.
+        partition_start_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionStartRecord):
+            Partition start record describing a new
+            change stream partition.
+
+            This field is a member of `oneof`_ ``record``.
+        partition_end_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEndRecord):
+            Partition end record describing a terminated
+            change stream partition.
+
+            This field is a member of `oneof`_ ``record``.
+        partition_event_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord):
+            Partition event record describing key range
+            changes for a change stream partition.
+
+            This field is a member of `oneof`_ ``record``.
+    """
+
+    class DataChangeRecord(proto.Message):
+        r"""A data change record contains a set of changes to a table
+        with the same modification type (insert, update, or delete)
+        committed at the same commit timestamp in one change stream
+        partition for the same transaction. Multiple data change records
+        can be returned for the same transaction across multiple change
+        stream partitions.
+
+        Attributes:
+            commit_timestamp (google.protobuf.timestamp_pb2.Timestamp):
+                Indicates the timestamp at which the change was committed.
+                DataChangeRecord.commit_timestamps,
+                PartitionStartRecord.start_timestamps,
+                PartitionEventRecord.commit_timestamps, and
+                PartitionEndRecord.end_timestamps can have the same value in
+                the same partition.
+            record_sequence (str):
+                Record sequence numbers are unique and monotonically
+                increasing (but not necessarily contiguous) for a specific
+                timestamp across record types in the same partition.
To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + + The record sequence number ordering across partitions is + only meaningful in the context of a specific transaction. + Record sequence numbers are unique across partitions for a + specific transaction. Sort the DataChangeRecords for the + same + [server_transaction_id][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.server_transaction_id] + by + [record_sequence][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.record_sequence] + to reconstruct the ordering of the changes within the + transaction. + server_transaction_id (str): + Provides a globally unique string that represents the + transaction in which the change was committed. Multiple + transactions can have the same commit timestamp, but each + transaction has a unique server_transaction_id. + is_last_record_in_transaction_in_partition (bool): + Indicates whether this is the last record for + a transaction in the current partition. Clients + can use this field to determine when all + records for a transaction in the current + partition have been received. + table (str): + Name of the table affected by the change. + column_metadata (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ColumnMetadata]): + Provides metadata describing the columns associated with the + [mods][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.mods] + listed below. + mods (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.Mod]): + Describes the changes that were made. + mod_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModType): + Describes the type of change. + value_capture_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ValueCaptureType): + Describes the value capture type that was + specified in the change stream configuration + when this change was captured. + number_of_records_in_transaction (int): + Indicates the number of data change records + that are part of this transaction across all + change stream partitions. This value can be used + to assemble all the records associated with a + particular transaction. + number_of_partitions_in_transaction (int): + Indicates the number of partitions that + return data change records for this transaction. + This value can be helpful in assembling all + records associated with a particular + transaction. + transaction_tag (str): + Indicates the transaction tag associated with + this transaction. + is_system_transaction (bool): + Indicates whether the transaction is a system + transaction. System transactions include those + issued by time-to-live (TTL), column backfill, + etc. + """ + class ModType(proto.Enum): + r"""Mod type describes the type of change Spanner applied to the data. + For example, if the client submits an INSERT_OR_UPDATE request, + Spanner will perform an insert if there is no existing row and + return ModType INSERT. Alternatively, if there is an existing row, + Spanner will perform an update and return ModType UPDATE. + + Values: + MOD_TYPE_UNSPECIFIED (0): + Not specified. + INSERT (10): + Indicates data was inserted. + UPDATE (20): + Indicates existing data was updated. + DELETE (30): + Indicates existing data was deleted. 
+ """ + MOD_TYPE_UNSPECIFIED = 0 + INSERT = 10 + UPDATE = 20 + DELETE = 30 + + class ValueCaptureType(proto.Enum): + r"""Value capture type describes which values are recorded in the + data change record. + + Values: + VALUE_CAPTURE_TYPE_UNSPECIFIED (0): + Not specified. + OLD_AND_NEW_VALUES (10): + Records both old and new values of the + modified watched columns. + NEW_VALUES (20): + Records only new values of the modified + watched columns. + NEW_ROW (30): + Records new values of all watched columns, + including modified and unmodified columns. + NEW_ROW_AND_OLD_VALUES (40): + Records the new values of all watched + columns, including modified and unmodified + columns. Also records the old values of the + modified columns. + """ + VALUE_CAPTURE_TYPE_UNSPECIFIED = 0 + OLD_AND_NEW_VALUES = 10 + NEW_VALUES = 20 + NEW_ROW = 30 + NEW_ROW_AND_OLD_VALUES = 40 + + class ColumnMetadata(proto.Message): + r"""Metadata for a column. + + Attributes: + name (str): + Name of the column. + type_ (google.cloud.spanner_v1.types.Type): + Type of the column. + is_primary_key (bool): + Indicates whether the column is a primary key + column. + ordinal_position (int): + Ordinal position of the column based on the + original table definition in the schema starting + with a value of 1. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: gs_type.Type = proto.Field( + proto.MESSAGE, + number=2, + message=gs_type.Type, + ) + is_primary_key: bool = proto.Field( + proto.BOOL, + number=3, + ) + ordinal_position: int = proto.Field( + proto.INT64, + number=4, + ) + + class ModValue(proto.Message): + r"""Returns the value and associated metadata for a particular field of + the + [Mod][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod]. + + Attributes: + column_metadata_index (int): + Index within the repeated + [column_metadata][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.column_metadata] + field, to obtain the column metadata for the column that was + modified. + value (google.protobuf.struct_pb2.Value): + The value of the column. + """ + + column_metadata_index: int = proto.Field( + proto.INT32, + number=1, + ) + value: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + + class Mod(proto.Message): + r"""A mod describes all data changes in a watched table row. + + Attributes: + keys (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): + Returns the value of the primary key of the + modified row. + old_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): + Returns the old values before the change for the modified + columns. Always empty for + [INSERT][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.INSERT], + or if old values are not being captured specified by + [value_capture_type][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType]. + new_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): + Returns the new values after the change for the modified + columns. Always empty for + [DELETE][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.DELETE]. 
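+
+            Editorial example (illustrative only, not part of the generated
+            documentation): an UPDATE of one column on the row with key ``1``
+            under the ``OLD_AND_NEW_VALUES`` capture type could be returned
+            as::
+
+                Mod {
+                  keys { column_metadata_index: 0 value { string_value: "1" } }
+                  old_values { column_metadata_index: 1 value { string_value: "100000" } }
+                  new_values { column_metadata_index: 1 value { string_value: "300000" } }
+                }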
+ """ + + keys: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ChangeStreamRecord.DataChangeRecord.ModValue', + ) + old_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ChangeStreamRecord.DataChangeRecord.ModValue', + ) + new_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ChangeStreamRecord.DataChangeRecord.ModValue', + ) + + commit_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + server_transaction_id: str = proto.Field( + proto.STRING, + number=3, + ) + is_last_record_in_transaction_in_partition: bool = proto.Field( + proto.BOOL, + number=4, + ) + table: str = proto.Field( + proto.STRING, + number=5, + ) + column_metadata: MutableSequence['ChangeStreamRecord.DataChangeRecord.ColumnMetadata'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='ChangeStreamRecord.DataChangeRecord.ColumnMetadata', + ) + mods: MutableSequence['ChangeStreamRecord.DataChangeRecord.Mod'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ChangeStreamRecord.DataChangeRecord.Mod', + ) + mod_type: 'ChangeStreamRecord.DataChangeRecord.ModType' = proto.Field( + proto.ENUM, + number=8, + enum='ChangeStreamRecord.DataChangeRecord.ModType', + ) + value_capture_type: 'ChangeStreamRecord.DataChangeRecord.ValueCaptureType' = proto.Field( + proto.ENUM, + number=9, + enum='ChangeStreamRecord.DataChangeRecord.ValueCaptureType', + ) + number_of_records_in_transaction: int = proto.Field( + proto.INT32, + number=10, + ) + number_of_partitions_in_transaction: int = proto.Field( + proto.INT32, + number=11, + ) + transaction_tag: str = proto.Field( + proto.STRING, + number=12, + ) + is_system_transaction: bool = proto.Field( + proto.BOOL, + number=13, + ) + + class HeartbeatRecord(proto.Message): + r"""A heartbeat record is returned as a progress indicator, when + there are no data changes or any other partition record types in + the change stream partition. + + Attributes: + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Indicates the timestamp at which the query + has returned all the records in the change + stream partition with timestamp <= heartbeat + timestamp. The heartbeat timestamp will not be + the same as the timestamps of other record types + in the same partition. + """ + + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + class PartitionStartRecord(proto.Message): + r"""A partition start record serves as a notification that the + client should schedule the partitions to be queried. + PartitionStartRecord returns information about one or more + partitions. + + Attributes: + start_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Start timestamp at which the partitions should be queried to + return change stream records with timestamps >= + start_timestamp. DataChangeRecord.commit_timestamps, + PartitionStartRecord.start_timestamps, + PartitionEventRecord.commit_timestamps, and + PartitionEndRecord.end_timestamps can have the same value in + the same partition. 
+ record_sequence (str): + Record sequence numbers are unique and monotonically + increasing (but not necessarily contiguous) for a specific + timestamp across record types in the same partition. To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + partition_tokens (MutableSequence[str]): + Unique partition identifiers to be used in + queries. + """ + + start_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + partition_tokens: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + class PartitionEndRecord(proto.Message): + r"""A partition end record serves as a notification that the + client should stop reading the partition. No further records are + expected to be retrieved on it. + + Attributes: + end_timestamp (google.protobuf.timestamp_pb2.Timestamp): + End timestamp at which the change stream partition is + terminated. All changes generated by this partition will + have timestamps <= end_timestamp. + DataChangeRecord.commit_timestamps, + PartitionStartRecord.start_timestamps, + PartitionEventRecord.commit_timestamps, and + PartitionEndRecord.end_timestamps can have the same value in + the same partition. PartitionEndRecord is the last record + returned for a partition. + record_sequence (str): + Record sequence numbers are unique and monotonically + increasing (but not necessarily contiguous) for a specific + timestamp across record types in the same partition. To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + partition_token (str): + Unique partition identifier describing the terminated change + stream partition. + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.partition_token] + is equal to the partition token of the change stream + partition currently queried to return this + PartitionEndRecord. + """ + + end_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + partition_token: str = proto.Field( + proto.STRING, + number=3, + ) + + class PartitionEventRecord(proto.Message): + r"""A partition event record describes key range changes for a change + stream partition. The changes to a row defined by its primary key + can be captured in one change stream partition for a specific time + range, and then be captured in a different change stream partition + for a different time range. This movement of key ranges across + change stream partitions is a reflection of activities, such as + Spanner's dynamic splitting and load balancing, etc. Processing this + event is needed if users want to guarantee processing of the changes + for any key in timestamp order. If time ordered processing of + changes for a primary key is not needed, this event can be ignored. + To guarantee time ordered processing for each primary key, if the + event describes move-ins, the reader of this partition needs to wait + until the readers of the source partitions have processed all + records with timestamps <= this + PartitionEventRecord.commit_timestamp, before advancing beyond this + PartitionEventRecord. 
If the event describes move-outs, the reader + can notify the readers of the destination partitions that they can + continue processing. + + Attributes: + commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Indicates the commit timestamp at which the key range change + occurred. DataChangeRecord.commit_timestamps, + PartitionStartRecord.start_timestamps, + PartitionEventRecord.commit_timestamps, and + PartitionEndRecord.end_timestamps can have the same value in + the same partition. + record_sequence (str): + Record sequence numbers are unique and monotonically + increasing (but not necessarily contiguous) for a specific + timestamp across record types in the same partition. To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + partition_token (str): + Unique partition identifier describing the partition this + event occurred on. + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] + is equal to the partition token of the change stream + partition currently queried to return this + PartitionEventRecord. + move_in_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveInEvent]): + Set when one or more key ranges are moved into the change + stream partition identified by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + + Example: Two key ranges are moved into partition (P1) from + partition (P2) and partition (P3) in a single transaction at + timestamp T. + + The PartitionEventRecord returned in P1 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P1" move_in_events { source_partition_token: "P2" } + move_in_events { source_partition_token: "P3" } } + + The PartitionEventRecord returned in P2 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P2" move_out_events { destination_partition_token: "P1" } } + + The PartitionEventRecord returned in P3 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P3" move_out_events { destination_partition_token: "P1" } } + move_out_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent]): + Set when one or more key ranges are moved out of the change + stream partition identified by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + + Example: Two key ranges are moved out of partition (P1) to + partition (P2) and partition (P3) in a single transaction at + timestamp T. 
+
+ The PartitionEventRecord returned in P1 will reflect the
+ move as:
+
+ PartitionEventRecord { commit_timestamp: T partition_token:
+ "P1" move_out_events { destination_partition_token: "P2" }
+ move_out_events { destination_partition_token: "P3" } }
+
+ The PartitionEventRecord returned in P2 will reflect the
+ move as:
+
+ PartitionEventRecord { commit_timestamp: T partition_token:
+ "P2" move_in_events { source_partition_token: "P1" } }
+
+ The PartitionEventRecord returned in P3 will reflect the
+ move as:
+
+ PartitionEventRecord { commit_timestamp: T partition_token:
+ "P3" move_in_events { source_partition_token: "P1" } }
+ """
+
+ class MoveInEvent(proto.Message):
+ r"""Describes move-in of the key ranges into the change stream partition
+ identified by
+ [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token].
+
+ To maintain processing the changes for a particular key in timestamp
+ order, the query processing the change stream partition identified
+ by
+ [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]
+ should not advance beyond the partition event record commit
+ timestamp until the queries processing the source change stream
+ partitions have processed all change stream records with timestamps
+ <= the partition event record commit timestamp.
+
+ Attributes:
+ source_partition_token (str):
+ A unique partition identifier describing the
+ source change stream partition that recorded
+ changes for the key range that is moving into
+ this partition.
+ """
+
+ source_partition_token: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+ class MoveOutEvent(proto.Message):
+ r"""Describes move-out of the key ranges out of the change stream
+ partition identified by
+ [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token].
+
+ To maintain processing the changes for a particular key in timestamp
+ order, the query processing the
+ [MoveOutEvent][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent]
+ in the partition identified by
+ [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]
+ should inform the queries processing the destination partitions that
+ they can unblock and proceed processing records past the
+ [commit_timestamp][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.commit_timestamp].
+
+ Attributes:
+ destination_partition_token (str):
+ A unique partition identifier describing the
+ destination change stream partition that will
+ record changes for the key range that is moving
+ out of this partition.
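+
+ A minimal sketch of acting on move-out events (``event_record`` and
+ ``notify`` are assumed names, not part of this API):
+
+ ::
+
+ # Tell the reader of each destination partition that it can
+ # proceed past this event's commit_timestamp.
+ for move_out in event_record.move_out_events:
+ notify(move_out.destination_partition_token,
+ event_record.commit_timestamp)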
+ """ + + destination_partition_token: str = proto.Field( + proto.STRING, + number=1, + ) + + commit_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + partition_token: str = proto.Field( + proto.STRING, + number=3, + ) + move_in_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveInEvent'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ChangeStreamRecord.PartitionEventRecord.MoveInEvent', + ) + move_out_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveOutEvent'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='ChangeStreamRecord.PartitionEventRecord.MoveOutEvent', + ) + + data_change_record: DataChangeRecord = proto.Field( + proto.MESSAGE, + number=1, + oneof='record', + message=DataChangeRecord, + ) + heartbeat_record: HeartbeatRecord = proto.Field( + proto.MESSAGE, + number=2, + oneof='record', + message=HeartbeatRecord, + ) + partition_start_record: PartitionStartRecord = proto.Field( + proto.MESSAGE, + number=3, + oneof='record', + message=PartitionStartRecord, + ) + partition_end_record: PartitionEndRecord = proto.Field( + proto.MESSAGE, + number=4, + oneof='record', + message=PartitionEndRecord, + ) + partition_event_record: PartitionEventRecord = proto.Field( + proto.MESSAGE, + number=5, + oneof='record', + message=PartitionEventRecord, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py new file mode 100644 index 0000000000..3771ef1f71 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import transaction +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'CommitResponse', + }, +) + + +class CommitResponse(proto.Message): + r"""The response for [Commit][google.spanner.v1.Spanner.Commit]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): + The Cloud Spanner timestamp at which the + transaction committed. + commit_stats (google.cloud.spanner_v1.types.CommitResponse.CommitStats): + The statistics about this ``Commit``. Not returned by + default. For more information, see + [CommitRequest.return_commit_stats][google.spanner.v1.CommitRequest.return_commit_stats]. 
+ precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken):
+ If specified, the transaction has not committed
+ yet. You must retry the commit with the new
+ precommit token.
+
+ This field is a member of `oneof`_ ``MultiplexedSessionRetry``.
+ snapshot_timestamp (google.protobuf.timestamp_pb2.Timestamp):
+ If ``TransactionOptions.isolation_level`` is set to
+ ``IsolationLevel.REPEATABLE_READ``, then the snapshot
+ timestamp is the timestamp at which all reads in the
+ transaction ran. This timestamp is never returned.
+ """
+
+ class CommitStats(proto.Message):
+ r"""Additional statistics about a commit.
+
+ Attributes:
+ mutation_count (int):
+ The total number of mutations for the transaction. Knowing
+ the ``mutation_count`` value can help you maximize the
+ number of mutations in a transaction and minimize the number
+ of API round trips. You can also monitor this value to
+ prevent transactions from exceeding the system
+ limit.
+ If the number of mutations exceeds the limit, the server
+ returns
+ ``INVALID_ARGUMENT``.
+ """
+
+ mutation_count: int = proto.Field(
+ proto.INT64,
+ number=1,
+ )
+
+ commit_timestamp: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message=timestamp_pb2.Timestamp,
+ )
+ commit_stats: CommitStats = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=CommitStats,
+ )
+ precommit_token: transaction.MultiplexedSessionPrecommitToken = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof='MultiplexedSessionRetry',
+ message=transaction.MultiplexedSessionPrecommitToken,
+ )
+ snapshot_timestamp: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message=timestamp_pb2.Timestamp,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py
new file mode 100644
index 0000000000..54f745c9b7
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+from google.protobuf import struct_pb2 # type: ignore
+
+
+__protobuf__ = proto.module(
+ package='google.spanner.v1',
+ manifest={
+ 'KeyRange',
+ 'KeySet',
+ },
+)
+
+
+class KeyRange(proto.Message):
+ r"""KeyRange represents a range of rows in a table or index.
+
+ A range has a start key and an end key. These keys can be open or
+ closed, indicating if the range includes rows with that key.
+
+ Keys are represented by lists, where the ith value in the list
+ corresponds to the ith component of the table or index primary key.
+ Individual values are encoded as described
+ [here][google.spanner.v1.TypeCode].
+
+ For example, consider the following table definition:
+
+ ::
+
+ CREATE TABLE UserEvents (
+ UserName STRING(MAX),
+ EventDate STRING(10)
+ ) PRIMARY KEY(UserName, EventDate);
+
+ The following keys name rows in this table:
+
+ ::
+
+ ["Bob", "2014-09-23"]
+ ["Alfred", "2015-06-12"]
+
+ Since the ``UserEvents`` table's ``PRIMARY KEY`` clause names two
+ columns, each ``UserEvents`` key has two elements; the first is the
+ ``UserName``, and the second is the ``EventDate``.
+
+ Key ranges with multiple components are interpreted
+ lexicographically by component using the table or index key's
+ declared sort order. For example, the following range returns all
+ events for user ``"Bob"`` that occurred in the year 2015:
+
+ ::
+
+ "start_closed": ["Bob", "2015-01-01"]
+ "end_closed": ["Bob", "2015-12-31"]
+
+ Start and end keys can omit trailing key components. This affects
+ the inclusion and exclusion of rows that exactly match the provided
+ key components: if the key is closed, then rows that exactly match
+ the provided components are included; if the key is open, then rows
+ that exactly match are not included.
+
+ For example, the following range includes all events for ``"Bob"``
+ that occurred during and after the year 2000:
+
+ ::
+
+ "start_closed": ["Bob", "2000-01-01"]
+ "end_closed": ["Bob"]
+
+ The next example retrieves all events for ``"Bob"``:
+
+ ::
+
+ "start_closed": ["Bob"]
+ "end_closed": ["Bob"]
+
+ To retrieve events before the year 2000:
+
+ ::
+
+ "start_closed": ["Bob"]
+ "end_open": ["Bob", "2000-01-01"]
+
+ The following range includes all rows in the table:
+
+ ::
+
+ "start_closed": []
+ "end_closed": []
+
+ This range returns all users whose ``UserName`` begins with any
+ character from A to C:
+
+ ::
+
+ "start_closed": ["A"]
+ "end_open": ["D"]
+
+ This range returns all users whose ``UserName`` begins with B:
+
+ ::
+
+ "start_closed": ["B"]
+ "end_open": ["C"]
+
+ Key ranges honor column sort order. For example, suppose a table is
+ defined as follows:
+
+ ::
+
+ CREATE TABLE DescendingSortedTable (
+ Key INT64,
+ ...
+ ) PRIMARY KEY(Key DESC);
+
+ The following range retrieves all rows with key values between 1 and
+ 100 inclusive:
+
+ ::
+
+ "start_closed": ["100"]
+ "end_closed": ["1"]
+
+ Note that 100 is passed as the start, and 1 is passed as the end,
+ because ``Key`` is a descending column in the schema.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ start_closed (google.protobuf.struct_pb2.ListValue):
+ If the start is closed, then the range includes all rows
+ whose first ``len(start_closed)`` key columns exactly match
+ ``start_closed``.
+
+ This field is a member of `oneof`_ ``start_key_type``.
+ start_open (google.protobuf.struct_pb2.ListValue):
+ If the start is open, then the range excludes rows whose
+ first ``len(start_open)`` key columns exactly match
+ ``start_open``.
+
+ This field is a member of `oneof`_ ``start_key_type``.
+ end_closed (google.protobuf.struct_pb2.ListValue):
+ If the end is closed, then the range includes all rows whose
+ first ``len(end_closed)`` key columns exactly match
+ ``end_closed``.
+
+ This field is a member of `oneof`_ ``end_key_type``.
+ end_open (google.protobuf.struct_pb2.ListValue): + If the end is open, then the range excludes rows whose first + ``len(end_open)`` key columns exactly match ``end_open``. + + This field is a member of `oneof`_ ``end_key_type``. + """ + + start_closed: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=1, + oneof='start_key_type', + message=struct_pb2.ListValue, + ) + start_open: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=2, + oneof='start_key_type', + message=struct_pb2.ListValue, + ) + end_closed: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=3, + oneof='end_key_type', + message=struct_pb2.ListValue, + ) + end_open: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=4, + oneof='end_key_type', + message=struct_pb2.ListValue, + ) + + +class KeySet(proto.Message): + r"""``KeySet`` defines a collection of Cloud Spanner keys and/or key + ranges. All the keys are expected to be in the same table or index. + The keys need not be sorted in any particular way. + + If the same key is specified multiple times in the set (for example + if two ranges, two keys, or a key and a range overlap), Cloud + Spanner behaves as if the key were only specified once. + + Attributes: + keys (MutableSequence[google.protobuf.struct_pb2.ListValue]): + A list of specific keys. Entries in ``keys`` should have + exactly as many elements as there are columns in the primary + or index key with which this ``KeySet`` is used. Individual + key values are encoded as described + [here][google.spanner.v1.TypeCode]. + ranges (MutableSequence[google.cloud.spanner_v1.types.KeyRange]): + A list of key ranges. See + [KeyRange][google.spanner.v1.KeyRange] for more information + about key range specifications. + all_ (bool): + For convenience ``all`` can be set to ``true`` to indicate + that this ``KeySet`` matches all keys in the table or index. + Note that any keys specified in ``keys`` or ``ranges`` are + only yielded once. + """ + + keys: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.ListValue, + ) + ranges: MutableSequence['KeyRange'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='KeyRange', + ) + all_: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py new file mode 100644 index 0000000000..0d4f7b4466 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import keys +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'Mutation', + }, +) + + +class Mutation(proto.Message): + r"""A modification to one or more Cloud Spanner rows. Mutations can be + applied to a Cloud Spanner database by sending them in a + [Commit][google.spanner.v1.Spanner.Commit] call. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + insert (google.cloud.spanner_v1.types.Mutation.Write): + Insert new rows in a table. If any of the rows already + exist, the write or transaction fails with error + ``ALREADY_EXISTS``. + + This field is a member of `oneof`_ ``operation``. + update (google.cloud.spanner_v1.types.Mutation.Write): + Update existing rows in a table. If any of the rows does not + already exist, the transaction fails with error + ``NOT_FOUND``. + + This field is a member of `oneof`_ ``operation``. + insert_or_update (google.cloud.spanner_v1.types.Mutation.Write): + Like [insert][google.spanner.v1.Mutation.insert], except + that if the row already exists, then its column values are + overwritten with the ones provided. Any column values not + explicitly written are preserved. + + When using + [insert_or_update][google.spanner.v1.Mutation.insert_or_update], + just as when using + [insert][google.spanner.v1.Mutation.insert], all + ``NOT NULL`` columns in the table must be given a value. + This holds true even when the row already exists and will + therefore actually be updated. + + This field is a member of `oneof`_ ``operation``. + replace (google.cloud.spanner_v1.types.Mutation.Write): + Like [insert][google.spanner.v1.Mutation.insert], except + that if the row already exists, it is deleted, and the + column values provided are inserted instead. Unlike + [insert_or_update][google.spanner.v1.Mutation.insert_or_update], + this means any values not explicitly written become + ``NULL``. + + In an interleaved table, if you create the child table with + the ``ON DELETE CASCADE`` annotation, then replacing a + parent row also deletes the child rows. Otherwise, you must + delete the child rows before you replace the parent row. + + This field is a member of `oneof`_ ``operation``. + delete (google.cloud.spanner_v1.types.Mutation.Delete): + Delete rows from a table. Succeeds whether or + not the named rows were present. + + This field is a member of `oneof`_ ``operation``. + """ + + class Write(proto.Message): + r"""Arguments to [insert][google.spanner.v1.Mutation.insert], + [update][google.spanner.v1.Mutation.update], + [insert_or_update][google.spanner.v1.Mutation.insert_or_update], and + [replace][google.spanner.v1.Mutation.replace] operations. + + Attributes: + table (str): + Required. The table whose rows will be + written. + columns (MutableSequence[str]): + The names of the columns in + [table][google.spanner.v1.Mutation.Write.table] to be + written. + + The list of columns must contain enough columns to allow + Cloud Spanner to derive values for all primary key columns + in the row(s) to be modified. 
+ values (MutableSequence[google.protobuf.struct_pb2.ListValue]): + The values to be written. ``values`` can contain more than + one list of values. If it does, then multiple rows are + written, one for each entry in ``values``. Each list in + ``values`` must have exactly as many entries as there are + entries in + [columns][google.spanner.v1.Mutation.Write.columns] above. + Sending multiple lists is equivalent to sending multiple + ``Mutation``\ s, each containing one ``values`` entry and + repeating [table][google.spanner.v1.Mutation.Write.table] + and [columns][google.spanner.v1.Mutation.Write.columns]. + Individual values in each list are encoded as described + [here][google.spanner.v1.TypeCode]. + """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + values: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=struct_pb2.ListValue, + ) + + class Delete(proto.Message): + r"""Arguments to [delete][google.spanner.v1.Mutation.delete] operations. + + Attributes: + table (str): + Required. The table whose rows will be + deleted. + key_set (google.cloud.spanner_v1.types.KeySet): + Required. The primary keys of the rows within + [table][google.spanner.v1.Mutation.Delete.table] to delete. + The primary keys must be specified in the order in which + they appear in the ``PRIMARY KEY()`` clause of the table's + equivalent DDL statement (the DDL statement used to create + the table). Delete is idempotent. The transaction will + succeed even if some or all rows do not exist. + """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + key_set: keys.KeySet = proto.Field( + proto.MESSAGE, + number=2, + message=keys.KeySet, + ) + + insert: Write = proto.Field( + proto.MESSAGE, + number=1, + oneof='operation', + message=Write, + ) + update: Write = proto.Field( + proto.MESSAGE, + number=2, + oneof='operation', + message=Write, + ) + insert_or_update: Write = proto.Field( + proto.MESSAGE, + number=3, + oneof='operation', + message=Write, + ) + replace: Write = proto.Field( + proto.MESSAGE, + number=4, + oneof='operation', + message=Write, + ) + delete: Delete = proto.Field( + proto.MESSAGE, + number=5, + oneof='operation', + message=Delete, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py new file mode 100644 index 0000000000..26e730d2d0 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'PlanNode', + 'QueryPlan', + }, +) + + +class PlanNode(proto.Message): + r"""Node information for nodes appearing in a + [QueryPlan.plan_nodes][google.spanner.v1.QueryPlan.plan_nodes]. + + Attributes: + index (int): + The ``PlanNode``'s index in [node + list][google.spanner.v1.QueryPlan.plan_nodes]. + kind (google.cloud.spanner_v1.types.PlanNode.Kind): + Used to determine the type of node. May be needed for + visualizing different kinds of nodes differently. For + example, If the node is a + [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] node, it + will have a condensed representation which can be used to + directly embed a description of the node in its parent. + display_name (str): + The display name for the node. + child_links (MutableSequence[google.cloud.spanner_v1.types.PlanNode.ChildLink]): + List of child node ``index``\ es and their relationship to + this parent. + short_representation (google.cloud.spanner_v1.types.PlanNode.ShortRepresentation): + Condensed representation for + [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] nodes. + metadata (google.protobuf.struct_pb2.Struct): + Attributes relevant to the node contained in a group of + key-value pairs. For example, a Parameter Reference node + could have the following information in its metadata: + + :: + + { + "parameter_reference": "param1", + "parameter_type": "array" + } + execution_stats (google.protobuf.struct_pb2.Struct): + The execution statistics associated with the + node, contained in a group of key-value pairs. + Only present if the plan was returned as a + result of a profile query. For example, number + of executions, number of rows/time per execution + etc. + """ + class Kind(proto.Enum): + r"""The kind of [PlanNode][google.spanner.v1.PlanNode]. Distinguishes + between the two different kinds of nodes that can appear in a query + plan. + + Values: + KIND_UNSPECIFIED (0): + Not specified. + RELATIONAL (1): + Denotes a Relational operator node in the expression tree. + Relational operators represent iterative processing of rows + during query execution. For example, a ``TableScan`` + operation that reads rows from a table. + SCALAR (2): + Denotes a Scalar node in the expression tree. + Scalar nodes represent non-iterable entities in + the query plan. For example, constants or + arithmetic operators appearing inside predicate + expressions or references to column names. + """ + KIND_UNSPECIFIED = 0 + RELATIONAL = 1 + SCALAR = 2 + + class ChildLink(proto.Message): + r"""Metadata associated with a parent-child relationship appearing in a + [PlanNode][google.spanner.v1.PlanNode]. + + Attributes: + child_index (int): + The node to which the link points. + type_ (str): + The type of the link. For example, in Hash + Joins this could be used to distinguish between + the build child and the probe child, or in the + case of the child being an output variable, to + represent the tag associated with the output + variable. + variable (str): + Only present if the child node is + [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] and + corresponds to an output variable of the parent node. The + field carries the name of the output variable. 
For example, + a ``TableScan`` operator that reads rows from a table will + have child links to the ``SCALAR`` nodes representing the + output variables created for each column that is read by the + operator. The corresponding ``variable`` fields will be set + to the variable names assigned to the columns. + """ + + child_index: int = proto.Field( + proto.INT32, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + variable: str = proto.Field( + proto.STRING, + number=3, + ) + + class ShortRepresentation(proto.Message): + r"""Condensed representation of a node and its subtree. Only present for + ``SCALAR`` [PlanNode(s)][google.spanner.v1.PlanNode]. + + Attributes: + description (str): + A string representation of the expression + subtree rooted at this node. + subqueries (MutableMapping[str, int]): + A mapping of (subquery variable name) -> (subquery node id) + for cases where the ``description`` string of this node + references a ``SCALAR`` subquery contained in the expression + subtree rooted at this node. The referenced ``SCALAR`` + subquery may not necessarily be a direct child of this node. + """ + + description: str = proto.Field( + proto.STRING, + number=1, + ) + subqueries: MutableMapping[str, int] = proto.MapField( + proto.STRING, + proto.INT32, + number=2, + ) + + index: int = proto.Field( + proto.INT32, + number=1, + ) + kind: Kind = proto.Field( + proto.ENUM, + number=2, + enum=Kind, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + child_links: MutableSequence[ChildLink] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ChildLink, + ) + short_representation: ShortRepresentation = proto.Field( + proto.MESSAGE, + number=5, + message=ShortRepresentation, + ) + metadata: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) + execution_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=7, + message=struct_pb2.Struct, + ) + + +class QueryPlan(proto.Message): + r"""Contains an ordered list of nodes appearing in the query + plan. + + Attributes: + plan_nodes (MutableSequence[google.cloud.spanner_v1.types.PlanNode]): + The nodes in the query plan. Plan nodes are returned in + pre-order starting with the plan root. Each + [PlanNode][google.spanner.v1.PlanNode]'s ``id`` corresponds + to its index in ``plan_nodes``. + """ + + plan_nodes: MutableSequence['PlanNode'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PlanNode', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py new file mode 100644 index 0000000000..9503d0c172 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import query_plan as gs_query_plan +from google.cloud.spanner_v1.types import transaction as gs_transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'ResultSet', + 'PartialResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + }, +) + + +class ResultSet(proto.Message): + r"""Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Attributes: + metadata (google.cloud.spanner_v1.types.ResultSetMetadata): + Metadata about the result set, such as row + type information. + rows (MutableSequence[google.protobuf.struct_pb2.ListValue]): + Each element in ``rows`` is a row whose format is defined by + [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. + The ith element in each row matches the ith field in + [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. + Elements are encoded based on type as described + [here][google.spanner.v1.TypeCode]. + stats (google.cloud.spanner_v1.types.ResultSetStats): + Query plan and execution statistics for the SQL statement + that produced this result set. These can be requested by + setting + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + DML statements always produce stats containing the number of + rows modified, unless executed using the + [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + Other fields might or might not be populated, based on the + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + Optional. A precommit token is included if the read-write + transaction is on a multiplexed session. Pass the precommit + token with the highest sequence number from this transaction + attempt to the [Commit][google.spanner.v1.Spanner.Commit] + request for this transaction. + """ + + metadata: 'ResultSetMetadata' = proto.Field( + proto.MESSAGE, + number=1, + message='ResultSetMetadata', + ) + rows: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.ListValue, + ) + stats: 'ResultSetStats' = proto.Field( + proto.MESSAGE, + number=3, + message='ResultSetStats', + ) + precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( + proto.MESSAGE, + number=5, + message=gs_transaction.MultiplexedSessionPrecommitToken, + ) + + +class PartialResultSet(proto.Message): + r"""Partial results from a streaming read or SQL query. Streaming + reads and SQL queries better tolerate large result sets, large + rows, and large values, but are a little trickier to consume. + + Attributes: + metadata (google.cloud.spanner_v1.types.ResultSetMetadata): + Metadata about the result set, such as row + type information. Only present in the first + response. + values (MutableSequence[google.protobuf.struct_pb2.Value]): + A streamed result set consists of a stream of values, which + might be split into many ``PartialResultSet`` messages to + accommodate large rows and/or large values. 
Every N complete
+ values defines a row, where N is equal to the number of
+ entries in
+ [metadata.row_type.fields][google.spanner.v1.StructType.fields].
+
+ Most values are encoded based on type as described
+ [here][google.spanner.v1.TypeCode].
+
+ It's possible that the last value in values is "chunked",
+ meaning that the rest of the value is sent in subsequent
+ ``PartialResultSet``\ (s). This is denoted by the
+ [chunked_value][google.spanner.v1.PartialResultSet.chunked_value]
+ field. Two or more chunked values can be merged to form a
+ complete value as follows:
+
+ - ``bool/number/null``: can't be chunked
+ - ``string``: concatenate the strings
+ - ``list``: concatenate the lists. If the last element in a
+ list is a ``string``, ``list``, or ``object``, merge it
+ with the first element in the next list by applying these
+ rules recursively.
+ - ``object``: concatenate the (field name, field value)
+ pairs. If a field name is duplicated, then apply these
+ rules recursively to merge the field values.
+
+ Some examples of merging:
+
+ ::
+
+ Strings are concatenated.
+ "foo", "bar" => "foobar"
+
+ Lists of non-strings are concatenated.
+ [2, 3], [4] => [2, 3, 4]
+
+ Lists are concatenated, but the last and first elements are merged
+ because they are strings.
+ ["a", "b"], ["c", "d"] => ["a", "bc", "d"]
+
+ Lists are concatenated, but the last and first elements are merged
+ because they are lists. Recursively, the last and first elements
+ of the inner lists are merged because they are strings.
+ ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"]
+
+ Non-overlapping object fields are combined.
+ {"a": "1"}, {"b": "2"} => {"a": "1", "b": "2"}
+
+ Overlapping object fields are merged.
+ {"a": "1"}, {"a": "2"} => {"a": "12"}
+
+ Examples of merging objects containing lists of strings.
+ {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]}
+
+ For a more complete example, suppose a streaming SQL query
+ is yielding a result set whose rows contain a single string
+ field. The following ``PartialResultSet``\ s might be
+ yielded:
+
+ ::
+
+ {
+ "metadata": { ... }
+ "values": ["Hello", "W"]
+ "chunked_value": true
+ "resume_token": "Af65..."
+ }
+ {
+ "values": ["orl"]
+ "chunked_value": true
+ }
+ {
+ "values": ["d"]
+ "resume_token": "Zx1B..."
+ }
+
+ This sequence of ``PartialResultSet``\ s encodes two rows,
+ one containing the field value ``"Hello"``, and a second
+ containing the field value ``"World" = "W" + "orl" + "d"``.
+
+ Not all ``PartialResultSet``\ s contain a ``resume_token``.
+ Execution can only be resumed from a previously yielded
+ ``resume_token``. For the above sequence of
+ ``PartialResultSet``\ s, resuming the query with
+ ``"resume_token": "Af65..."`` yields results from the
+ ``PartialResultSet`` with value "orl".
+ chunked_value (bool):
+ If true, then the final value in
+ [values][google.spanner.v1.PartialResultSet.values] is
+ chunked, and must be combined with more values from
+ subsequent ``PartialResultSet``\ s to obtain a complete
+ field value.
+ resume_token (bytes):
+ Streaming calls might be interrupted for a variety of
+ reasons, such as TCP connection loss. If this occurs, the
+ stream of results can be resumed by re-sending the original
+ request and including ``resume_token``. Note that executing
+ any other transaction in the same session invalidates the
+ token.
+ stats (google.cloud.spanner_v1.types.ResultSetStats):
+ Query plan and execution statistics for the statement that
+ produced this streaming result set.
These can be requested + by setting + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] + and are sent only once with the last response in the stream. + This field is also present in the last response for DML + statements. + precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + Optional. A precommit token is included if the read-write + transaction has multiplexed sessions enabled. Pass the + precommit token with the highest sequence number from this + transaction attempt to the + [Commit][google.spanner.v1.Spanner.Commit] request for this + transaction. + last (bool): + Optional. Indicates whether this is the last + ``PartialResultSet`` in the stream. The server might + optionally set this field. Clients shouldn't rely on this + field being set in all cases. + """ + + metadata: 'ResultSetMetadata' = proto.Field( + proto.MESSAGE, + number=1, + message='ResultSetMetadata', + ) + values: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + chunked_value: bool = proto.Field( + proto.BOOL, + number=3, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=4, + ) + stats: 'ResultSetStats' = proto.Field( + proto.MESSAGE, + number=5, + message='ResultSetStats', + ) + precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( + proto.MESSAGE, + number=8, + message=gs_transaction.MultiplexedSessionPrecommitToken, + ) + last: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +class ResultSetMetadata(proto.Message): + r"""Metadata about a [ResultSet][google.spanner.v1.ResultSet] or + [PartialResultSet][google.spanner.v1.PartialResultSet]. + + Attributes: + row_type (google.cloud.spanner_v1.types.StructType): + Indicates the field names and types for the rows in the + result set. For example, a SQL query like + ``"SELECT UserId, UserName FROM Users"`` could return a + ``row_type`` value like: + + :: + + "fields": [ + { "name": "UserId", "type": { "code": "INT64" } }, + { "name": "UserName", "type": { "code": "STRING" } }, + ] + transaction (google.cloud.spanner_v1.types.Transaction): + If the read or SQL query began a transaction + as a side-effect, the information about the new + transaction is yielded here. + undeclared_parameters (google.cloud.spanner_v1.types.StructType): + A SQL query can be parameterized. In PLAN mode, these + parameters can be undeclared. This indicates the field names + and types for those undeclared parameters in the SQL query. + For example, a SQL query like + ``"SELECT * FROM Users where UserId = @userId and UserName = @userName "`` + could return a ``undeclared_parameters`` value like: + + :: + + "fields": [ + { "name": "UserId", "type": { "code": "INT64" } }, + { "name": "UserName", "type": { "code": "STRING" } }, + ] + """ + + row_type: gs_type.StructType = proto.Field( + proto.MESSAGE, + number=1, + message=gs_type.StructType, + ) + transaction: gs_transaction.Transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.Transaction, + ) + undeclared_parameters: gs_type.StructType = proto.Field( + proto.MESSAGE, + number=3, + message=gs_type.StructType, + ) + + +class ResultSetStats(proto.Message): + r"""Additional statistics about a + [ResultSet][google.spanner.v1.ResultSet] or + [PartialResultSet][google.spanner.v1.PartialResultSet]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_plan (google.cloud.spanner_v1.types.QueryPlan): + [QueryPlan][google.spanner.v1.QueryPlan] for the query + associated with this result. + query_stats (google.protobuf.struct_pb2.Struct): + Aggregated statistics from the execution of the query. Only + present when the query is profiled. For example, a query + could return the statistics as follows: + + :: + + { + "rows_returned": "3", + "elapsed_time": "1.22 secs", + "cpu_time": "1.19 secs" + } + row_count_exact (int): + Standard DML returns an exact count of rows + that were modified. + + This field is a member of `oneof`_ ``row_count``. + row_count_lower_bound (int): + Partitioned DML doesn't offer exactly-once + semantics, so it returns a lower bound of the + rows modified. + + This field is a member of `oneof`_ ``row_count``. + """ + + query_plan: gs_query_plan.QueryPlan = proto.Field( + proto.MESSAGE, + number=1, + message=gs_query_plan.QueryPlan, + ) + query_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + row_count_exact: int = proto.Field( + proto.INT64, + number=3, + oneof='row_count', + ) + row_count_lower_bound: int = proto.Field( + proto.INT64, + number=4, + oneof='row_count', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py new file mode 100644 index 0000000000..2742de8269 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py @@ -0,0 +1,1782 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import keys +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import transaction as gs_transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'CreateSessionRequest', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 'Session', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'DeleteSessionRequest', + 'RequestOptions', + 'DirectedReadOptions', + 'ExecuteSqlRequest', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'Partition', + 'PartitionResponse', + 'ReadRequest', + 'BeginTransactionRequest', + 'CommitRequest', + 'RollbackRequest', + 'BatchWriteRequest', + 'BatchWriteResponse', + }, +) + + +class CreateSessionRequest(proto.Message): + r"""The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + + Attributes: + database (str): + Required. The database in which the new + session is created. + session (google.cloud.spanner_v1.types.Session): + Required. The session to create. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + session: 'Session' = proto.Field( + proto.MESSAGE, + number=2, + message='Session', + ) + + +class BatchCreateSessionsRequest(proto.Message): + r"""The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + Attributes: + database (str): + Required. The database in which the new + sessions are created. + session_template (google.cloud.spanner_v1.types.Session): + Parameters to apply to each created session. + session_count (int): + Required. The number of sessions to be created in this batch + call. The API can return fewer than the requested number of + sessions. If a specific number of sessions are desired, the + client can make additional calls to ``BatchCreateSessions`` + (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + session_template: 'Session' = proto.Field( + proto.MESSAGE, + number=2, + message='Session', + ) + session_count: int = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateSessionsResponse(proto.Message): + r"""The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + Attributes: + session (MutableSequence[google.cloud.spanner_v1.types.Session]): + The freshly created sessions. + """ + + session: MutableSequence['Session'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Session', + ) + + +class Session(proto.Message): + r"""A session in the Cloud Spanner API. + + Attributes: + name (str): + Output only. The name of the session. This is + always system-assigned. + labels (MutableMapping[str, str]): + The labels for the session. + + - Label keys must be between 1 and 63 characters long and + must conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. 
+ - Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + - No more than 64 labels can be associated with a given + session. + + See https://goo.gl/xmQnxf for more information on and + examples of labels. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the session + is created. + approximate_last_use_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The approximate timestamp when + the session is last used. It's typically earlier + than the actual last use time. + creator_role (str): + The database role which created this session. + multiplexed (bool): + Optional. If ``true``, specifies a multiplexed session. Use + a multiplexed session for multiple, concurrent read-only + operations. Don't use them for read-write transactions, + partitioned reads, or partitioned queries. Use + [``sessions.create``][google.spanner.v1.Spanner.CreateSession] + to create multiplexed sessions. Don't use + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions] + to create a multiplexed session. You can't delete or list + multiplexed sessions. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + approximate_last_use_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + creator_role: str = proto.Field( + proto.STRING, + number=5, + ) + multiplexed: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class GetSessionRequest(proto.Message): + r"""The request for [GetSession][google.spanner.v1.Spanner.GetSession]. + + Attributes: + name (str): + Required. The name of the session to + retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSessionsRequest(proto.Message): + r"""The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Attributes: + database (str): + Required. The database in which to list + sessions. + page_size (int): + Number of sessions to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.v1.ListSessionsResponse.next_page_token] + from a previous + [ListSessionsResponse][google.spanner.v1.ListSessionsResponse]. + filter (str): + An expression for filtering the results of the request. + Filter rules are case insensitive. The fields eligible for + filtering are: + + - ``labels.key`` where key is the name of a label + + Some examples of using filters are: + + - ``labels.env:*`` --> The session has the label "env". + - ``labels.env:dev`` --> The session has the label "env" and + the value of the label contains the string "dev". + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSessionsResponse(proto.Message): + r"""The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Attributes: + sessions (MutableSequence[google.cloud.spanner_v1.types.Session]): + The list of requested sessions. 
+ next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListSessions][google.spanner.v1.Spanner.ListSessions] call + to fetch more of the matching sessions. + """ + + @property + def raw_page(self): + return self + + sessions: MutableSequence['Session'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Session', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSessionRequest(proto.Message): + r"""The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + + Attributes: + name (str): + Required. The name of the session to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RequestOptions(proto.Message): + r"""Common request options for various APIs. + + Attributes: + priority (google.cloud.spanner_v1.types.RequestOptions.Priority): + Priority for the request. + request_tag (str): + A per-request tag which can be applied to queries or reads, + used for statistics collection. Both ``request_tag`` and + ``transaction_tag`` can be specified for a read or query + that belongs to a transaction. This field is ignored for + requests where it's not applicable (for example, + ``CommitRequest``). Legal characters for ``request_tag`` + values are all printable characters (ASCII 32 - 126) and the + length of a request_tag is limited to 50 characters. Values + that exceed this limit are truncated. Any leading underscore + (\_) characters are removed from the string. + transaction_tag (str): + A tag used for statistics collection about this transaction. + Both ``request_tag`` and ``transaction_tag`` can be + specified for a read or query that belongs to a transaction. + The value of transaction_tag should be the same for all + requests belonging to the same transaction. If this request + doesn't belong to any transaction, ``transaction_tag`` is + ignored. Legal characters for ``transaction_tag`` values are + all printable characters (ASCII 32 - 126) and the length of + a ``transaction_tag`` is limited to 50 characters. Values + that exceed this limit are truncated. Any leading underscore + (\_) characters are removed from the string. + """ + class Priority(proto.Enum): + r"""The relative priority for requests. Note that priority isn't + applicable for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + + The priority acts as a hint to the Cloud Spanner scheduler and + doesn't guarantee priority or order of execution. For example: + + - Some parts of a write operation always execute at + ``PRIORITY_HIGH``, regardless of the specified priority. This can + cause you to see an increase in high priority workload even when + executing a low priority request. This can also potentially cause + a priority inversion where a lower priority request is fulfilled + ahead of a higher priority request. + - If a transaction contains multiple operations with different + priorities, Cloud Spanner doesn't guarantee to process the higher + priority operations first. There might be other constraints to + satisfy, such as the order of operations. + + Values: + PRIORITY_UNSPECIFIED (0): + ``PRIORITY_UNSPECIFIED`` is equivalent to ``PRIORITY_HIGH``. + PRIORITY_LOW (1): + This specifies that the request is low + priority. + PRIORITY_MEDIUM (2): + This specifies that the request is medium + priority. + PRIORITY_HIGH (3): + This specifies that the request is high + priority. 
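+
+ As an illustrative sketch (the tag value is an assumption, not a
+ recommendation), a low-priority request can be configured as:
+
+ ::
+
+ from google.cloud import spanner_v1
+
+ options = spanner_v1.RequestOptions(
+ priority=spanner_v1.RequestOptions.Priority.PRIORITY_LOW,
+ request_tag="nightly-report",
+ )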
+        """
+        PRIORITY_UNSPECIFIED = 0
+        PRIORITY_LOW = 1
+        PRIORITY_MEDIUM = 2
+        PRIORITY_HIGH = 3
+
+    priority: Priority = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=Priority,
+    )
+    request_tag: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    transaction_tag: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DirectedReadOptions(proto.Message):
+    r"""The ``DirectedReadOptions`` can be used to indicate which replicas
+    or regions should be used for non-transactional reads or queries.
+
+    ``DirectedReadOptions`` can only be specified for a read-only
+    transaction, otherwise the API returns an ``INVALID_ARGUMENT``
+    error.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        include_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.IncludeReplicas):
+            ``Include_replicas`` indicates the order of replicas (as
+            they appear in this list) to process the request. If
+            ``auto_failover_disabled`` is set to ``true`` and all
+            replicas are exhausted without finding a healthy replica,
+            Spanner waits for a replica in the list to become
+            available; requests might fail due to ``DEADLINE_EXCEEDED``
+            errors.
+
+            This field is a member of `oneof`_ ``replicas``.
+        exclude_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.ExcludeReplicas):
+            ``Exclude_replicas`` indicates that specified replicas
+            should be excluded from serving requests. Spanner doesn't
+            route requests to the replicas in this list.
+
+            This field is a member of `oneof`_ ``replicas``.
+    """
+
+    class ReplicaSelection(proto.Message):
+        r"""The directed read replica selector. Callers must provide one or more
+        of the following fields for replica selection:
+
+        - ``location`` - The location must be one of the regions within the
+          multi-region configuration of your database.
+        - ``type`` - The type of the replica.
+
+        Some examples of using replica_selectors are:
+
+        - ``location:us-east1`` --> The "us-east1" replica(s) of any
+          available type are used to process the request.
+        - ``type:READ_ONLY`` --> The "READ_ONLY" type replica(s) in the
+          nearest available location are used to process the request.
+        - ``location:us-east1 type:READ_ONLY`` --> The "READ_ONLY" type
+          replica(s) in location "us-east1" are used to process the request.
+
+        Attributes:
+            location (str):
+                The location or region of the serving
+                requests, for example, "us-east1".
+            type_ (google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection.Type):
+                The type of replica.
+        """
+        class Type(proto.Enum):
+            r"""Indicates the type of replica.
+
+            Values:
+                TYPE_UNSPECIFIED (0):
+                    Not specified.
+                READ_WRITE (1):
+                    Read-write replicas support both reads and
+                    writes.
+                READ_ONLY (2):
+                    Read-only replicas only support reads (not
+                    writes).
+            """
+            TYPE_UNSPECIFIED = 0
+            READ_WRITE = 1
+            READ_ONLY = 2
+
+        location: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        type_: 'DirectedReadOptions.ReplicaSelection.Type' = proto.Field(
+            proto.ENUM,
+            number=2,
+            enum='DirectedReadOptions.ReplicaSelection.Type',
+        )
+
+    class IncludeReplicas(proto.Message):
+        r"""An ``IncludeReplicas`` contains a repeated set of
+        ``ReplicaSelection`` which indicates the order in which replicas
+        should be considered.
+
+        Attributes:
+            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
+                The directed read replica selector.
+            auto_failover_disabled (bool):
+                If ``true``, Spanner doesn't route requests to a replica
+                outside the ``include_replicas`` list when all of the
+                specified replicas are unavailable or unhealthy. Default
+                value is ``false``.
+        """
+
+        replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='DirectedReadOptions.ReplicaSelection',
+        )
+        auto_failover_disabled: bool = proto.Field(
+            proto.BOOL,
+            number=2,
+        )
+
+    class ExcludeReplicas(proto.Message):
+        r"""An ``ExcludeReplicas`` contains a repeated set of
+        ``ReplicaSelection`` that should be excluded from serving requests.
+
+        Attributes:
+            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
+                The directed read replica selector.
+        """
+
+        replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='DirectedReadOptions.ReplicaSelection',
+        )
+
+    include_replicas: IncludeReplicas = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='replicas',
+        message=IncludeReplicas,
+    )
+    exclude_replicas: ExcludeReplicas = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='replicas',
+        message=ExcludeReplicas,
+    )
+
+
+class ExecuteSqlRequest(proto.Message):
+    r"""The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]
+    and
+    [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
+
+    Attributes:
+        session (str):
+            Required. The session in which the SQL query
+            should be performed.
+        transaction (google.cloud.spanner_v1.types.TransactionSelector):
+            The transaction to use.
+
+            For queries, if none is provided, the default is
+            a temporary read-only transaction with strong
+            concurrency.
+
+            Standard DML statements require a read-write
+            transaction. To protect against replays,
+            single-use transactions are not supported. The
+            caller must either supply an existing
+            transaction ID or begin a new transaction.
+
+            Partitioned DML requires an existing Partitioned
+            DML transaction ID.
+        sql (str):
+            Required. The SQL string.
+        params (google.protobuf.struct_pb2.Struct):
+            Parameter names and values that bind to placeholders in the
+            SQL string.
+
+            A parameter placeholder consists of the ``@`` character
+            followed by the parameter name (for example,
+            ``@firstName``). Parameter names must conform to the naming
+            requirements of identifiers as specified at
+            https://cloud.google.com/spanner/docs/lexical#identifiers.
+
+            Parameters can appear anywhere that a literal value is
+            expected. The same parameter name can be used more than
+            once, for example:
+
+            ``"WHERE id > @msg_id AND id < @msg_id + 100"``
+
+            It's an error to execute a SQL statement with unbound
+            parameters.
+        param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]):
+            It isn't always possible for Cloud Spanner to infer the
+            right SQL type from a JSON value. For example, values of
+            type ``BYTES`` and values of type ``STRING`` both appear in
+            [params][google.spanner.v1.ExecuteSqlRequest.params] as JSON
+            strings.
+
+            In these cases, you can use ``param_types`` to specify the
+            exact SQL type for some or all of the SQL statement
+            parameters. See the definition of
+            [Type][google.spanner.v1.Type] for more information about
+            SQL types.
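+            For illustration, a minimal sketch of binding a ``BYTES``
+            parameter explicitly (the session path and query are
+            placeholders):
+
+            .. code:: python
+
+                from google.cloud import spanner_v1
+                from google.protobuf import struct_pb2
+
+                request = spanner_v1.ExecuteSqlRequest(
+                    session="projects/p/instances/i/databases/d/sessions/s",
+                    sql="SELECT * FROM Users WHERE token = @token",
+                    # BYTES parameters travel as base64-encoded JSON
+                    # strings, so the SQL type must be given explicitly.
+                    params=struct_pb2.Struct(
+                        fields={"token": struct_pb2.Value(string_value="YWJj")},
+                    ),
+                    param_types={
+                        "token": spanner_v1.Type(code=spanner_v1.TypeCode.BYTES),
+                    },
+                )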
+ resume_token (bytes): + If this request is resuming a previously interrupted SQL + statement execution, ``resume_token`` should be copied from + the last + [PartialResultSet][google.spanner.v1.PartialResultSet] + yielded before the interruption. Doing this enables the new + SQL statement execution to resume where the last one left + off. The rest of the request parameters must exactly match + the request that yielded this token. + query_mode (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode): + Used to control the amount of debugging information returned + in [ResultSetStats][google.spanner.v1.ResultSetStats]. If + [partition_token][google.spanner.v1.ExecuteSqlRequest.partition_token] + is set, + [query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] + can only be set to + [QueryMode.NORMAL][google.spanner.v1.ExecuteSqlRequest.QueryMode.NORMAL]. + partition_token (bytes): + If present, results are restricted to the specified + partition previously created using ``PartitionQuery``. There + must be an exact match for the values of fields common to + this message and the ``PartitionQueryRequest`` message used + to create this ``partition_token``. + seqno (int): + A per-transaction sequence number used to + identify this request. This field makes each + request idempotent such that if the request is + received multiple times, at most one succeeds. + + The sequence number must be monotonically + increasing within the transaction. If a request + arrives for the first time with an out-of-order + sequence number, the transaction can be aborted. + Replays of previously handled requests yield the + same response as the first execution. + + Required for DML statements. Ignored for + queries. + query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): + Query optimizer configuration to use for the + given query. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions): + Directed read options for this request. + data_boost_enabled (bool): + If this is for a partitioned query and this field is set to + ``true``, the request is executed with Spanner Data Boost + independent compute resources. + + If the field is set to ``true`` but the request doesn't set + ``partition_token``, the API returns an ``INVALID_ARGUMENT`` + error. + last_statement (bool): + Optional. If set to ``true``, this statement marks the end + of the transaction. After this statement executes, you must + commit or abort the transaction. Attempts to execute any + other requests against this transaction (including reads and + queries) are rejected. + + For DML statements, setting this option might cause some + error reporting to be deferred until commit time (for + example, validation of unique constraints). Given this, + successful execution of a DML statement shouldn't be assumed + until a subsequent ``Commit`` call completes successfully. + """ + class QueryMode(proto.Enum): + r"""Mode in which the statement must be processed. + + Values: + NORMAL (0): + The default mode. Only the statement results + are returned. + PLAN (1): + This mode returns only the query plan, + without any results or execution statistics + information. + PROFILE (2): + This mode returns the query plan, overall + execution statistics, operator level execution + statistics along with the results. This has a + performance overhead compared to the other + modes. 
It isn't recommended to use this mode for
+                production traffic.
+            WITH_STATS (3):
+                This mode returns the overall (but not
+                operator-level) execution statistics along with
+                the results.
+            WITH_PLAN_AND_STATS (4):
+                This mode returns the query plan, overall
+                (but not operator-level) execution statistics
+                along with the results.
+        """
+        NORMAL = 0
+        PLAN = 1
+        PROFILE = 2
+        WITH_STATS = 3
+        WITH_PLAN_AND_STATS = 4
+
+    class QueryOptions(proto.Message):
+        r"""Query optimizer configuration.
+
+        Attributes:
+            optimizer_version (str):
+                An option to control the selection of optimizer version.
+
+                This parameter allows individual queries to pick different
+                query optimizer versions.
+
+                Specifying ``latest`` as a value instructs Cloud Spanner to
+                use the latest supported query optimizer version. If not
+                specified, Cloud Spanner uses the optimizer version set in
+                the database-level options. Any other positive integer (from
+                the list of supported optimizer versions) overrides the
+                default optimizer version for query execution.
+
+                The list of supported optimizer versions can be queried from
+                ``SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS``.
+
+                Executing a SQL statement with an invalid optimizer version
+                fails with an ``INVALID_ARGUMENT`` error.
+
+                See
+                https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer
+                for more information on managing the query optimizer.
+
+                The ``optimizer_version`` statement hint has precedence over
+                this setting.
+            optimizer_statistics_package (str):
+                An option to control the selection of optimizer statistics
+                package.
+
+                This parameter allows individual queries to use a different
+                query optimizer statistics package.
+
+                Specifying ``latest`` as a value instructs Cloud Spanner to
+                use the latest generated statistics package. If not
+                specified, Cloud Spanner uses the statistics package set in
+                the database-level options, or the latest package if the
+                database option isn't set.
+
+                The statistics package requested by the query has to be
+                exempt from garbage collection. This can be achieved with
+                the following DDL statement, where ``<package_name>`` is the
+                name of the package to pin:
+
+                .. code:: sql
+
+                    ALTER STATISTICS <package_name> SET OPTIONS (allow_gc=false)
+
+                The list of available statistics packages can be queried
+                from ``INFORMATION_SCHEMA.SPANNER_STATISTICS``.
+
+                Executing a SQL statement with an invalid optimizer
+                statistics package or with a statistics package that allows
+                garbage collection fails with an ``INVALID_ARGUMENT`` error.
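+        For illustration, a minimal sketch of pinning both options for
+        a single query (the session path is a placeholder):
+
+        .. code:: python
+
+            from google.cloud import spanner_v1
+
+            request = spanner_v1.ExecuteSqlRequest(
+                session="projects/p/instances/i/databases/d/sessions/s",
+                sql="SELECT 1",
+                query_options=spanner_v1.ExecuteSqlRequest.QueryOptions(
+                    optimizer_version="latest",
+                    optimizer_statistics_package="latest",
+                ),
+            )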
+ """ + + optimizer_version: str = proto.Field( + proto.STRING, + number=1, + ) + optimizer_statistics_package: str = proto.Field( + proto.STRING, + number=2, + ) + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + sql: str = proto.Field( + proto.STRING, + number=3, + ) + params: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Struct, + ) + param_types: MutableMapping[str, gs_type.Type] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=5, + message=gs_type.Type, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=6, + ) + query_mode: QueryMode = proto.Field( + proto.ENUM, + number=7, + enum=QueryMode, + ) + partition_token: bytes = proto.Field( + proto.BYTES, + number=8, + ) + seqno: int = proto.Field( + proto.INT64, + number=9, + ) + query_options: QueryOptions = proto.Field( + proto.MESSAGE, + number=10, + message=QueryOptions, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=11, + message='RequestOptions', + ) + directed_read_options: 'DirectedReadOptions' = proto.Field( + proto.MESSAGE, + number=15, + message='DirectedReadOptions', + ) + data_boost_enabled: bool = proto.Field( + proto.BOOL, + number=16, + ) + last_statement: bool = proto.Field( + proto.BOOL, + number=17, + ) + + +class ExecuteBatchDmlRequest(proto.Message): + r"""The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + + Attributes: + session (str): + Required. The session in which the DML + statements should be performed. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + Required. The transaction to use. Must be a + read-write transaction. + To protect against replays, single-use + transactions are not supported. The caller must + either supply an existing transaction ID or + begin a new transaction. + statements (MutableSequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement]): + Required. The list of statements to execute in this batch. + Statements are executed serially, such that the effects of + statement ``i`` are visible to statement ``i+1``. Each + statement must be a DML statement. Execution stops at the + first failed statement; the remaining statements are not + executed. + + Callers must provide at least one statement. + seqno (int): + Required. A per-transaction sequence number + used to identify this request. This field makes + each request idempotent such that if the request + is received multiple times, at most one + succeeds. + + The sequence number must be monotonically + increasing within the transaction. If a request + arrives for the first time with an out-of-order + sequence number, the transaction might be + aborted. Replays of previously handled requests + yield the same response as the first execution. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + last_statements (bool): + Optional. If set to ``true``, this request marks the end of + the transaction. After these statements execute, you must + commit or abort the transaction. Attempts to execute any + other requests against this transaction (including reads and + queries) are rejected. + + Setting this option might cause some error reporting to be + deferred until commit time (for example, validation of + unique constraints). 
Given this, successful execution of + statements shouldn't be assumed until a subsequent + ``Commit`` call completes successfully. + """ + + class Statement(proto.Message): + r"""A single DML statement. + + Attributes: + sql (str): + Required. The DML string. + params (google.protobuf.struct_pb2.Struct): + Parameter names and values that bind to placeholders in the + DML string. + + A parameter placeholder consists of the ``@`` character + followed by the parameter name (for example, + ``@firstName``). Parameter names can contain letters, + numbers, and underscores. + + Parameters can appear anywhere that a literal value is + expected. The same parameter name can be used more than + once, for example: + + ``"WHERE id > @msg_id AND id < @msg_id + 100"`` + + It's an error to execute a SQL statement with unbound + parameters. + param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]): + It isn't always possible for Cloud Spanner to infer the + right SQL type from a JSON value. For example, values of + type ``BYTES`` and values of type ``STRING`` both appear in + [params][google.spanner.v1.ExecuteBatchDmlRequest.Statement.params] + as JSON strings. + + In these cases, ``param_types`` can be used to specify the + exact SQL type for some or all of the SQL statement + parameters. See the definition of + [Type][google.spanner.v1.Type] for more information about + SQL types. + """ + + sql: str = proto.Field( + proto.STRING, + number=1, + ) + params: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + param_types: MutableMapping[str, gs_type.Type] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message=gs_type.Type, + ) + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + statements: MutableSequence[Statement] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Statement, + ) + seqno: int = proto.Field( + proto.INT64, + number=4, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=5, + message='RequestOptions', + ) + last_statements: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ExecuteBatchDmlResponse(proto.Message): + r"""The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of [ResultSet][google.spanner.v1.ResultSet] + messages, one for each DML statement that has successfully executed, + in the same order as the statements in the request. If a statement + fails, the status in the response body identifies the cause of the + failure. + + To check for DML statements that failed, use the following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value ``OK`` indicates + that all statements were executed successfully. + 2. If the status was not ``OK``, check the number of result sets in + the response. If the response contains ``N`` + [ResultSet][google.spanner.v1.ResultSet] messages, then statement + ``N+1`` in the request failed. + + Example 1: + + - Request: 5 DML statements, all executed successfully. + - Response: 5 [ResultSet][google.spanner.v1.ResultSet] messages, + with the status ``OK``. + + Example 2: + + - Request: 5 DML statements. The third statement has a syntax error. + - Response: 2 [ResultSet][google.spanner.v1.ResultSet] messages, and + a syntax error (``INVALID_ARGUMENT``) status. 
+      The number of
+      [ResultSet][google.spanner.v1.ResultSet] messages indicates that
+      the third statement failed, and the fourth and fifth statements
+      were not executed.
+
+    Attributes:
+        result_sets (MutableSequence[google.cloud.spanner_v1.types.ResultSet]):
+            One [ResultSet][google.spanner.v1.ResultSet] for each
+            statement in the request that ran successfully, in the same
+            order as the statements in the request. Each
+            [ResultSet][google.spanner.v1.ResultSet] does not contain
+            any rows. The
+            [ResultSetStats][google.spanner.v1.ResultSetStats] in each
+            [ResultSet][google.spanner.v1.ResultSet] contain the number
+            of rows modified by the statement.
+
+            Only the first [ResultSet][google.spanner.v1.ResultSet] in
+            the response contains valid
+            [ResultSetMetadata][google.spanner.v1.ResultSetMetadata].
+        status (google.rpc.status_pb2.Status):
+            If all DML statements are executed successfully, the status
+            is ``OK``. Otherwise, the error status of the first failed
+            statement.
+        precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken):
+            Optional. A precommit token is included if the read-write
+            transaction is on a multiplexed session. The precommit
+            token with the highest sequence number from this
+            transaction attempt should be passed to the
+            [Commit][google.spanner.v1.Spanner.Commit] request for this
+            transaction.
+    """
+
+    result_sets: MutableSequence[result_set.ResultSet] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=result_set.ResultSet,
+    )
+    status: status_pb2.Status = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=status_pb2.Status,
+    )
+    precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=gs_transaction.MultiplexedSessionPrecommitToken,
+    )
+
+
+class PartitionOptions(proto.Message):
+    r"""Options for a ``PartitionQueryRequest`` and
+    ``PartitionReadRequest``.
+
+    Attributes:
+        partition_size_bytes (int):
+            **Note:** This hint is currently ignored by
+            ``PartitionQuery`` and ``PartitionRead`` requests.
+
+            The desired data size for each partition generated. The
+            default for this option is currently 1 GiB. This is only a
+            hint. The actual size of each partition can be smaller or
+            larger than this size request.
+        max_partitions (int):
+            **Note:** This hint is currently ignored by
+            ``PartitionQuery`` and ``PartitionRead`` requests.
+
+            The desired maximum number of partitions to return. For
+            example, this might be set to the number of workers
+            available. The default for this option is currently 10,000.
+            The maximum value is currently 200,000. This is only a hint.
+            The actual number of partitions returned can be smaller or
+            larger than this maximum count request.
+    """
+
+    partition_size_bytes: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    max_partitions: int = proto.Field(
+        proto.INT64,
+        number=2,
+    )
+
+
+class PartitionQueryRequest(proto.Message):
+    r"""The request for
+    [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery]
+
+    Attributes:
+        session (str):
+            Required. The session used to create the
+            partitions.
+        transaction (google.cloud.spanner_v1.types.TransactionSelector):
+            Read-only snapshot transactions are
+            supported; read-write and single-use
+            transactions are not.
+        sql (str):
+            Required. The query request to generate partitions for. The
+            request fails if the query isn't root partitionable. For a
+            query to be root partitionable, it needs to satisfy a few
+            conditions. For example, if the query execution plan
+            contains a distributed union operator, then it must be the
+            first operator in the plan. For more information about other
+            conditions, see `Read data in
+            parallel <https://cloud.google.com/spanner/docs/reads#read_data_in_parallel>`__.
+
+            The query request must not contain DML commands, such as
+            ``INSERT``, ``UPDATE``, or ``DELETE``. Use
+            [``ExecuteStreamingSql``][google.spanner.v1.Spanner.ExecuteStreamingSql]
+            with a ``PartitionedDml`` transaction for large,
+            partition-friendly DML operations.
+        params (google.protobuf.struct_pb2.Struct):
+            Parameter names and values that bind to placeholders in the
+            SQL string.
+
+            A parameter placeholder consists of the ``@`` character
+            followed by the parameter name (for example,
+            ``@firstName``). Parameter names can contain letters,
+            numbers, and underscores.
+
+            Parameters can appear anywhere that a literal value is
+            expected. The same parameter name can be used more than
+            once, for example:
+
+            ``"WHERE id > @msg_id AND id < @msg_id + 100"``
+
+            It's an error to execute a SQL statement with unbound
+            parameters.
+        param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]):
+            It isn't always possible for Cloud Spanner to infer the
+            right SQL type from a JSON value. For example, values of
+            type ``BYTES`` and values of type ``STRING`` both appear in
+            [params][google.spanner.v1.PartitionQueryRequest.params] as
+            JSON strings.
+
+            In these cases, ``param_types`` can be used to specify the
+            exact SQL type for some or all of the SQL query parameters.
+            See the definition of [Type][google.spanner.v1.Type] for
+            more information about SQL types.
+        partition_options (google.cloud.spanner_v1.types.PartitionOptions):
+            Additional options that affect how many
+            partitions are created.
+    """
+
+    session: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    transaction: gs_transaction.TransactionSelector = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gs_transaction.TransactionSelector,
+    )
+    sql: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    params: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=struct_pb2.Struct,
+    )
+    param_types: MutableMapping[str, gs_type.Type] = proto.MapField(
+        proto.STRING,
+        proto.MESSAGE,
+        number=5,
+        message=gs_type.Type,
+    )
+    partition_options: 'PartitionOptions' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='PartitionOptions',
+    )
+
+
+class PartitionReadRequest(proto.Message):
+    r"""The request for
+    [PartitionRead][google.spanner.v1.Spanner.PartitionRead]
+
+    Attributes:
+        session (str):
+            Required. The session used to create the
+            partitions.
+        transaction (google.cloud.spanner_v1.types.TransactionSelector):
+            Read-only snapshot transactions are
+            supported; read-write and single-use
+            transactions are not.
+        table (str):
+            Required. The name of the table in the
+            database to be read.
+        index (str):
+            If non-empty, the name of an index on
+            [table][google.spanner.v1.PartitionReadRequest.table]. This
+            index is used instead of the table primary key when
+            interpreting
+            [key_set][google.spanner.v1.PartitionReadRequest.key_set]
+            and sorting result rows. See
+            [key_set][google.spanner.v1.PartitionReadRequest.key_set]
+            for further information.
+        columns (MutableSequence[str]):
+            The columns of
+            [table][google.spanner.v1.PartitionReadRequest.table] to be
+            returned for each row matching this request.
+        key_set (google.cloud.spanner_v1.types.KeySet):
+            Required. ``key_set`` identifies the rows to be yielded.
+ ``key_set`` names the primary keys of the rows in + [table][google.spanner.v1.PartitionReadRequest.table] to be + yielded, unless + [index][google.spanner.v1.PartitionReadRequest.index] is + present. If + [index][google.spanner.v1.PartitionReadRequest.index] is + present, then + [key_set][google.spanner.v1.PartitionReadRequest.key_set] + instead names index keys in + [index][google.spanner.v1.PartitionReadRequest.index]. + + It isn't an error for the ``key_set`` to name rows that + don't exist in the database. Read yields nothing for + nonexistent rows. + partition_options (google.cloud.spanner_v1.types.PartitionOptions): + Additional options that affect how many + partitions are created. + """ + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + table: str = proto.Field( + proto.STRING, + number=3, + ) + index: str = proto.Field( + proto.STRING, + number=4, + ) + columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + key_set: keys.KeySet = proto.Field( + proto.MESSAGE, + number=6, + message=keys.KeySet, + ) + partition_options: 'PartitionOptions' = proto.Field( + proto.MESSAGE, + number=9, + message='PartitionOptions', + ) + + +class Partition(proto.Message): + r"""Information returned for each partition returned in a + PartitionResponse. + + Attributes: + partition_token (bytes): + This token can be passed to ``Read``, ``StreamingRead``, + ``ExecuteSql``, or ``ExecuteStreamingSql`` requests to + restrict the results to those identified by this partition + token. + """ + + partition_token: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class PartitionResponse(proto.Message): + r"""The response for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + Attributes: + partitions (MutableSequence[google.cloud.spanner_v1.types.Partition]): + Partitions created by this request. + transaction (google.cloud.spanner_v1.types.Transaction): + Transaction created by this request. + """ + + partitions: MutableSequence['Partition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Partition', + ) + transaction: gs_transaction.Transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.Transaction, + ) + + +class ReadRequest(proto.Message): + r"""The request for [Read][google.spanner.v1.Spanner.Read] and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + + Attributes: + session (str): + Required. The session in which the read + should be performed. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + The transaction to use. If none is provided, + the default is a temporary read-only transaction + with strong concurrency. + table (str): + Required. The name of the table in the + database to be read. + index (str): + If non-empty, the name of an index on + [table][google.spanner.v1.ReadRequest.table]. This index is + used instead of the table primary key when interpreting + [key_set][google.spanner.v1.ReadRequest.key_set] and sorting + result rows. See + [key_set][google.spanner.v1.ReadRequest.key_set] for further + information. + columns (MutableSequence[str]): + Required. The columns of + [table][google.spanner.v1.ReadRequest.table] to be returned + for each row matching this request. + key_set (google.cloud.spanner_v1.types.KeySet): + Required. 
``key_set`` identifies the rows to be yielded.
+            ``key_set`` names the primary keys of the rows in
+            [table][google.spanner.v1.ReadRequest.table] to be yielded,
+            unless [index][google.spanner.v1.ReadRequest.index] is
+            present. If [index][google.spanner.v1.ReadRequest.index] is
+            present, then
+            [key_set][google.spanner.v1.ReadRequest.key_set] instead
+            names index keys in
+            [index][google.spanner.v1.ReadRequest.index].
+
+            If the
+            [partition_token][google.spanner.v1.ReadRequest.partition_token]
+            field is empty, rows are yielded in table primary key order
+            (if [index][google.spanner.v1.ReadRequest.index] is empty)
+            or index key order (if
+            [index][google.spanner.v1.ReadRequest.index] is non-empty).
+            If the
+            [partition_token][google.spanner.v1.ReadRequest.partition_token]
+            field isn't empty, rows are yielded in an unspecified order.
+
+            It isn't an error for the ``key_set`` to name rows that
+            don't exist in the database. Read yields nothing for
+            nonexistent rows.
+        limit (int):
+            If greater than zero, only the first ``limit`` rows are
+            yielded. If ``limit`` is zero, the default is no limit. A
+            limit can't be specified if ``partition_token`` is set.
+        resume_token (bytes):
+            If this request is resuming a previously interrupted read,
+            ``resume_token`` should be copied from the last
+            [PartialResultSet][google.spanner.v1.PartialResultSet]
+            yielded before the interruption. Doing this enables the new
+            read to resume where the last read left off. The rest of the
+            request parameters must exactly match the request that
+            yielded this token.
+        partition_token (bytes):
+            If present, results are restricted to the specified
+            partition previously created using ``PartitionRead``. There
+            must be an exact match for the values of fields common to
+            this message and the ``PartitionReadRequest`` message used
+            to create this ``partition_token``.
+        request_options (google.cloud.spanner_v1.types.RequestOptions):
+            Common options for this request.
+        directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions):
+            Directed read options for this request.
+        data_boost_enabled (bool):
+            If this is for a partitioned read and this field is set to
+            ``true``, the request is executed with Spanner Data Boost
+            independent compute resources.
+
+            If the field is set to ``true`` but the request doesn't set
+            ``partition_token``, the API returns an ``INVALID_ARGUMENT``
+            error.
+        order_by (google.cloud.spanner_v1.types.ReadRequest.OrderBy):
+            Optional. Order for the returned rows.
+
+            By default, Spanner returns result rows in primary key order
+            except for PartitionRead requests. For applications that
+            don't require rows to be returned in primary key
+            (``ORDER_BY_PRIMARY_KEY``) order, setting the
+            ``ORDER_BY_NO_ORDER`` option allows Spanner to optimize row
+            retrieval, resulting in lower latencies in certain cases
+            (for example, bulk point lookups).
+        lock_hint (google.cloud.spanner_v1.types.ReadRequest.LockHint):
+            Optional. Lock hint for the request; it can
+            only be used with read-write transactions.
+    """
+    class OrderBy(proto.Enum):
+        r"""An option to control the order in which rows are returned
+        from a read.
+
+        Values:
+            ORDER_BY_UNSPECIFIED (0):
+                Default value.
+
+                ``ORDER_BY_UNSPECIFIED`` is equivalent to
+                ``ORDER_BY_PRIMARY_KEY``.
+            ORDER_BY_PRIMARY_KEY (1):
+                Read rows are returned in primary key order.
+
+                In the event that this option is used in conjunction with
+                the ``partition_token`` field, the API returns an
+                ``INVALID_ARGUMENT`` error.
+            ORDER_BY_NO_ORDER (2):
+                Read rows are returned in any order.
+        """
+        ORDER_BY_UNSPECIFIED = 0
+        ORDER_BY_PRIMARY_KEY = 1
+        ORDER_BY_NO_ORDER = 2
+
+    class LockHint(proto.Enum):
+        r"""A lock hint mechanism for reads done within a transaction.
+
+        Values:
+            LOCK_HINT_UNSPECIFIED (0):
+                Default value.
+
+                ``LOCK_HINT_UNSPECIFIED`` is equivalent to
+                ``LOCK_HINT_SHARED``.
+            LOCK_HINT_SHARED (1):
+                Acquire shared locks.
+
+                By default, when you perform a read as part of a read-write
+                transaction, Spanner acquires shared read locks, which
+                allows other reads to still access the data until your
+                transaction is ready to commit. When your transaction is
+                committing and writes are being applied, the transaction
+                attempts to upgrade to an exclusive lock for any data you
+                are writing. For more information about locks, see `Lock
+                modes <https://cloud.google.com/spanner/docs/introspection/lock-statistics#explain-lock-modes>`__.
+            LOCK_HINT_EXCLUSIVE (2):
+                Acquire exclusive locks.
+
+                Requesting exclusive locks is beneficial if you observe high
+                write contention, which means you notice that multiple
+                transactions are concurrently trying to read and write to
+                the same data, resulting in a large number of aborts. This
+                problem occurs when two transactions initially acquire
+                shared locks and then both try to upgrade to exclusive locks
+                at the same time. In this situation, both transactions wait
+                for the other to give up its lock, resulting in a deadlock.
+                Spanner detects this and forces one of the transactions to
+                abort. However, this is a slow and expensive operation that
+                lowers performance. In this case, it makes sense to acquire
+                exclusive locks at the start of the transaction because
+                then, when multiple transactions try to act on the same
+                data, they are automatically serialized. Each transaction
+                waits its turn to acquire the lock and avoids getting into
+                deadlock situations.
+
+                Because the exclusive lock hint is just a hint, it shouldn't
+                be considered equivalent to a mutex. In other words, you
+                shouldn't use Spanner exclusive locks as a mutual exclusion
+                mechanism for the execution of code outside of Spanner.
+
+                **Note:** Request exclusive locks judiciously because they
+                block others from reading that data for the entire
+                transaction, rather than just when the writes are being
+                performed. Unless you observe high write contention, you
+                should use the default of shared read locks so you don't
+                prematurely block other clients from reading the data that
+                you're writing to.
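+        For illustration, a minimal sketch of requesting exclusive
+        locks for a read inside a read-write transaction (the session
+        path, table, and columns are placeholders):
+
+        .. code:: python
+
+            from google.cloud import spanner_v1
+
+            request = spanner_v1.ReadRequest(
+                session="projects/p/instances/i/databases/d/sessions/s",
+                # lock_hint is only honored in read-write transactions.
+                transaction=spanner_v1.TransactionSelector(
+                    begin=spanner_v1.TransactionOptions(
+                        read_write=spanner_v1.TransactionOptions.ReadWrite(),
+                    ),
+                ),
+                table="Users",
+                columns=["id", "balance"],
+                key_set=spanner_v1.KeySet(all_=True),
+                lock_hint=spanner_v1.ReadRequest.LockHint.LOCK_HINT_EXCLUSIVE,
+            )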
+ """ + LOCK_HINT_UNSPECIFIED = 0 + LOCK_HINT_SHARED = 1 + LOCK_HINT_EXCLUSIVE = 2 + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + table: str = proto.Field( + proto.STRING, + number=3, + ) + index: str = proto.Field( + proto.STRING, + number=4, + ) + columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + key_set: keys.KeySet = proto.Field( + proto.MESSAGE, + number=6, + message=keys.KeySet, + ) + limit: int = proto.Field( + proto.INT64, + number=8, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=9, + ) + partition_token: bytes = proto.Field( + proto.BYTES, + number=10, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=11, + message='RequestOptions', + ) + directed_read_options: 'DirectedReadOptions' = proto.Field( + proto.MESSAGE, + number=14, + message='DirectedReadOptions', + ) + data_boost_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + order_by: OrderBy = proto.Field( + proto.ENUM, + number=16, + enum=OrderBy, + ) + lock_hint: LockHint = proto.Field( + proto.ENUM, + number=17, + enum=LockHint, + ) + + +class BeginTransactionRequest(proto.Message): + r"""The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + + Attributes: + session (str): + Required. The session in which the + transaction runs. + options (google.cloud.spanner_v1.types.TransactionOptions): + Required. Options for the new transaction. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. Priority is ignored for + this request. Setting the priority in this + ``request_options`` struct doesn't do anything. To set the + priority for a transaction, set it on the reads and writes + that are part of this transaction instead. + mutation_key (google.cloud.spanner_v1.types.Mutation): + Optional. Required for read-write + transactions on a multiplexed session that + commit mutations but don't perform any reads or + queries. You must randomly select one of the + mutations from the mutation set and send it as a + part of this request. + """ + + session: str = proto.Field( + proto.STRING, + number=1, + ) + options: gs_transaction.TransactionOptions = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionOptions, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='RequestOptions', + ) + mutation_key: mutation.Mutation = proto.Field( + proto.MESSAGE, + number=4, + message=mutation.Mutation, + ) + + +class CommitRequest(proto.Message): + r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + session (str): + Required. The session in which the + transaction to be committed is running. + transaction_id (bytes): + Commit a previously-started transaction. + + This field is a member of `oneof`_ ``transaction``. + single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): + Execute mutations in a temporary transaction. 
Note that
+            unlike commit of a previously-started transaction, commit
+            with a temporary transaction is non-idempotent. That is, if
+            the ``CommitRequest`` is sent to Cloud Spanner more than
+            once (for instance, due to retries in the application, or in
+            the transport library), it's possible that the mutations are
+            executed more than once. If this is undesirable, use
+            [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]
+            and [Commit][google.spanner.v1.Spanner.Commit] instead.
+
+            This field is a member of `oneof`_ ``transaction``.
+        mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]):
+            The mutations to be executed when this
+            transaction commits. All mutations are applied
+            atomically, in the order they appear in this
+            list.
+        return_commit_stats (bool):
+            If ``true``, then statistics related to the transaction are
+            included in the
+            [CommitResponse][google.spanner.v1.CommitResponse.commit_stats].
+            Default value is ``false``.
+        max_commit_delay (google.protobuf.duration_pb2.Duration):
+            Optional. The amount of latency this request
+            is configured to incur in order to improve
+            throughput. If this field isn't set, Spanner
+            assumes requests are relatively latency
+            sensitive and automatically determines an
+            appropriate delay time. You can specify a commit
+            delay value between 0 and 500 ms.
+        request_options (google.cloud.spanner_v1.types.RequestOptions):
+            Common options for this request.
+        precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken):
+            Optional. If the read-write transaction was executed on a
+            multiplexed session, then you must include the precommit
+            token with the highest sequence number received in this
+            transaction attempt. Failing to do so results in a
+            ``FailedPrecondition`` error.
+    """
+
+    session: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    transaction_id: bytes = proto.Field(
+        proto.BYTES,
+        number=2,
+        oneof='transaction',
+    )
+    single_use_transaction: gs_transaction.TransactionOptions = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='transaction',
+        message=gs_transaction.TransactionOptions,
+    )
+    mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=4,
+        message=mutation.Mutation,
+    )
+    return_commit_stats: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+    max_commit_delay: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=duration_pb2.Duration,
+    )
+    request_options: 'RequestOptions' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='RequestOptions',
+    )
+    precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field(
+        proto.MESSAGE,
+        number=9,
+        message=gs_transaction.MultiplexedSessionPrecommitToken,
+    )
+
+
+class RollbackRequest(proto.Message):
+    r"""The request for [Rollback][google.spanner.v1.Spanner.Rollback].
+
+    Attributes:
+        session (str):
+            Required. The session in which the
+            transaction to roll back is running.
+        transaction_id (bytes):
+            Required. The transaction to roll back.
+    """
+
+    session: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    transaction_id: bytes = proto.Field(
+        proto.BYTES,
+        number=2,
+    )
+
+
+class BatchWriteRequest(proto.Message):
+    r"""The request for [BatchWrite][google.spanner.v1.Spanner.BatchWrite].
+
+    Attributes:
+        session (str):
+            Required. The session in which the batch
+            request is to be run.
+        request_options (google.cloud.spanner_v1.types.RequestOptions):
+            Common options for this request.
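+            For illustration, a minimal sketch of a batch write that
+            sets a medium priority (the session path and the single
+            mutation are placeholders; ``mutation_groups`` is described
+            next):
+
+            .. code:: python
+
+                from google.cloud import spanner_v1
+                from google.protobuf import struct_pb2
+
+                client = spanner_v1.SpannerClient()
+                mutation = spanner_v1.Mutation(
+                    insert=spanner_v1.Mutation.Write(
+                        table="Users",
+                        columns=["id"],
+                        values=[struct_pb2.ListValue(
+                            values=[struct_pb2.Value(string_value="u1")],
+                        )],
+                    ),
+                )
+                request = spanner_v1.BatchWriteRequest(
+                    session="projects/p/instances/i/databases/d/sessions/s",
+                    request_options=spanner_v1.RequestOptions(
+                        priority=spanner_v1.RequestOptions.Priority.PRIORITY_MEDIUM,
+                    ),
+                    mutation_groups=[
+                        spanner_v1.BatchWriteRequest.MutationGroup(
+                            mutations=[mutation],
+                        ),
+                    ],
+                )
+                # batch_write streams one response per applied group.
+                for response in client.batch_write(request=request):
+                    print(response.status)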
+ mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): + Required. The groups of mutations to be + applied. + exclude_txn_from_change_streams (bool): + Optional. If you don't set the + ``exclude_txn_from_change_streams`` option or if it's set to + ``false``, then any change streams monitoring columns + modified by transactions will capture the updates made + within that transaction. + """ + + class MutationGroup(proto.Message): + r"""A group of mutations to be committed together. Related + mutations should be placed in a group. For example, two + mutations inserting rows with the same primary key prefix in + both parent and child tables are related. + + Attributes: + mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): + Required. The mutations in this group. + """ + + mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=mutation.Mutation, + ) + + session: str = proto.Field( + proto.STRING, + number=1, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='RequestOptions', + ) + mutation_groups: MutableSequence[MutationGroup] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=MutationGroup, + ) + exclude_txn_from_change_streams: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class BatchWriteResponse(proto.Message): + r"""The result of applying a batch of mutations. + + Attributes: + indexes (MutableSequence[int]): + The mutation groups applied in this batch. The values index + into the ``mutation_groups`` field in the corresponding + ``BatchWriteRequest``. + status (google.rpc.status_pb2.Status): + An ``OK`` status indicates success. Any other status + indicates a failure. + commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): + The commit timestamp of the transaction that applied this + batch. Present if ``status`` is ``OK``, absent otherwise. + """ + + indexes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=1, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + commit_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py new file mode 100644 index 0000000000..5e13285565 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py @@ -0,0 +1,491 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'TransactionOptions', + 'Transaction', + 'TransactionSelector', + 'MultiplexedSessionPrecommitToken', + }, +) + + +class TransactionOptions(proto.Message): + r"""Options to use for transactions. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + read_write (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite): + Transaction may write. + + Authorization to begin a read-write transaction requires + ``spanner.databases.beginOrRollbackReadWriteTransaction`` + permission on the ``session`` resource. + + This field is a member of `oneof`_ ``mode``. + partitioned_dml (google.cloud.spanner_v1.types.TransactionOptions.PartitionedDml): + Partitioned DML transaction. + + Authorization to begin a Partitioned DML transaction + requires + ``spanner.databases.beginPartitionedDmlTransaction`` + permission on the ``session`` resource. + + This field is a member of `oneof`_ ``mode``. + read_only (google.cloud.spanner_v1.types.TransactionOptions.ReadOnly): + Transaction does not write. + + Authorization to begin a read-only transaction requires + ``spanner.databases.beginReadOnlyTransaction`` permission on + the ``session`` resource. + + This field is a member of `oneof`_ ``mode``. + exclude_txn_from_change_streams (bool): + When ``exclude_txn_from_change_streams`` is set to ``true``, + it prevents read or write transactions from being tracked in + change streams. + + - If the DDL option ``allow_txn_exclusion`` is set to + ``true``, then the updates made within this transaction + aren't recorded in the change stream. + + - If you don't set the DDL option ``allow_txn_exclusion`` or + if it's set to ``false``, then the updates made within + this transaction are recorded in the change stream. + + When ``exclude_txn_from_change_streams`` is set to ``false`` + or not set, modifications from this transaction are recorded + in all change streams that are tracking columns modified by + these transactions. + + The ``exclude_txn_from_change_streams`` option can only be + specified for read-write or partitioned DML transactions, + otherwise the API returns an ``INVALID_ARGUMENT`` error. + isolation_level (google.cloud.spanner_v1.types.TransactionOptions.IsolationLevel): + Isolation level for the transaction. + """ + class IsolationLevel(proto.Enum): + r"""``IsolationLevel`` is used when setting ``isolation_level`` for a + transaction. + + Values: + ISOLATION_LEVEL_UNSPECIFIED (0): + Default value. + + If the value is not specified, the ``SERIALIZABLE`` + isolation level is used. + SERIALIZABLE (1): + All transactions appear as if they executed in a serial + order, even if some of the reads, writes, and other + operations of distinct transactions actually occurred in + parallel. Spanner assigns commit timestamps that reflect the + order of committed transactions to implement this property. + Spanner offers a stronger guarantee than serializability + called external consistency. 
For more information, see
+                `TrueTime and external
+                consistency <https://cloud.google.com/spanner/docs/true-time-external-consistency>`__.
+            REPEATABLE_READ (2):
+                All reads performed during the transaction observe a
+                consistent snapshot of the database, and the transaction is
+                only successfully committed in the absence of conflicts
+                between its updates and any concurrent updates that have
+                occurred since that snapshot. Consequently, in contrast to
+                ``SERIALIZABLE`` transactions, only write-write conflicts
+                are detected in snapshot transactions.
+
+                This isolation level does not support Read-only and
+                Partitioned DML transactions.
+
+                When ``REPEATABLE_READ`` is specified on a read-write
+                transaction, the locking semantics default to
+                ``OPTIMISTIC``.
+        """
+        ISOLATION_LEVEL_UNSPECIFIED = 0
+        SERIALIZABLE = 1
+        REPEATABLE_READ = 2
+
+    class ReadWrite(proto.Message):
+        r"""Message type to initiate a read-write transaction. Currently
+        this transaction type has no options.
+
+        Attributes:
+            read_lock_mode (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite.ReadLockMode):
+                Read lock mode for the transaction.
+            multiplexed_session_previous_transaction_id (bytes):
+                Optional. Clients should pass the transaction
+                ID of the previous transaction attempt that was
+                aborted if this transaction is being executed on
+                a multiplexed session.
+        """
+        class ReadLockMode(proto.Enum):
+            r"""``ReadLockMode`` is used to set the read lock mode for read-write
+            transactions.
+
+            Values:
+                READ_LOCK_MODE_UNSPECIFIED (0):
+                    Default value.
+
+                    - If isolation level is
+                      [REPEATABLE_READ][google.spanner.v1.TransactionOptions.IsolationLevel.REPEATABLE_READ],
+                      then it is an error to specify ``read_lock_mode``. Locking
+                      semantics default to ``OPTIMISTIC``. No validation checks
+                      are done for reads, except to validate that the data that
+                      was served at the snapshot time is unchanged at commit
+                      time in the following cases:
+
+                      1. reads done as part of queries that use
+                         ``SELECT FOR UPDATE``
+                      2. reads done as part of statements with a
+                         ``LOCK_SCANNED_RANGES`` hint
+                      3. reads done as part of DML statements
+
+                    - At all other isolation levels, if ``read_lock_mode`` is
+                      the default value, then pessimistic read locks are used.
+                PESSIMISTIC (1):
+                    Pessimistic lock mode.
+
+                    Read locks are acquired immediately on read. The semantics
+                    described only apply to
+                    [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE]
+                    isolation.
+                OPTIMISTIC (2):
+                    Optimistic lock mode.
+
+                    Locks for reads within the transaction are not acquired on
+                    read. Instead, the locks are acquired on a commit to validate
+                    that read/queried data has not changed since the transaction
+                    started. The semantics described only apply to
+                    [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE]
+                    isolation.
+            """
+            READ_LOCK_MODE_UNSPECIFIED = 0
+            PESSIMISTIC = 1
+            OPTIMISTIC = 2
+
+        read_lock_mode: 'TransactionOptions.ReadWrite.ReadLockMode' = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum='TransactionOptions.ReadWrite.ReadLockMode',
+        )
+        multiplexed_session_previous_transaction_id: bytes = proto.Field(
+            proto.BYTES,
+            number=2,
+        )
+
+    class PartitionedDml(proto.Message):
+        r"""Message type to initiate a Partitioned DML transaction.
+        """
+
+    class ReadOnly(proto.Message):
+        r"""Message type to initiate a read-only transaction.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + strong (bool): + Read at a timestamp where all previously + committed transactions are visible. + + This field is a member of `oneof`_ ``timestamp_bound``. + min_read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Executes all reads at a timestamp >= ``min_read_timestamp``. + + This is useful for requesting fresher data than some + previous read, or data that is fresh enough to observe the + effects of some previously committed transaction whose + timestamp is known. + + Note that this option can only be used in single-use + transactions. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + + This field is a member of `oneof`_ ``timestamp_bound``. + max_staleness (google.protobuf.duration_pb2.Duration): + Read data at a timestamp >= ``NOW - max_staleness`` seconds. + Guarantees that all writes that have committed more than the + specified number of seconds ago are visible. Because Cloud + Spanner chooses the exact timestamp, this mode works even if + the client's local clock is substantially skewed from Cloud + Spanner commit timestamps. + + Useful for reading the freshest data available at a nearby + replica, while bounding the possible staleness if the local + replica has fallen behind. + + Note that this option can only be used in single-use + transactions. + + This field is a member of `oneof`_ ``timestamp_bound``. + read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Executes all reads at the given timestamp. Unlike other + modes, reads at a specific timestamp are repeatable; the + same read at the same timestamp always returns the same + data. If the timestamp is in the future, the read is blocked + until the specified timestamp, modulo the read's deadline. + + Useful for large scale consistent reads such as mapreduces, + or for coordinating many reads against a consistent snapshot + of the data. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + + This field is a member of `oneof`_ ``timestamp_bound``. + exact_staleness (google.protobuf.duration_pb2.Duration): + Executes all reads at a timestamp that is + ``exact_staleness`` old. The timestamp is chosen soon after + the read is started. + + Guarantees that all writes that have committed more than the + specified number of seconds ago are visible. Because Cloud + Spanner chooses the exact timestamp, this mode works even if + the client's local clock is substantially skewed from Cloud + Spanner commit timestamps. + + Useful for reading at nearby replicas without the + distributed timestamp negotiation overhead of + ``max_staleness``. + + This field is a member of `oneof`_ ``timestamp_bound``. + return_read_timestamp (bool): + If true, the Cloud Spanner-selected read timestamp is + included in the [Transaction][google.spanner.v1.Transaction] + message that describes the transaction. 
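+        For illustration, a minimal sketch of a read-only bound that
+        tolerates at most 15 seconds of staleness and reports the
+        chosen read timestamp:
+
+        .. code:: python
+
+            from google.cloud import spanner_v1
+            from google.protobuf import duration_pb2
+
+            options = spanner_v1.TransactionOptions(
+                read_only=spanner_v1.TransactionOptions.ReadOnly(
+                    # max_staleness is only valid for single-use
+                    # transactions (see above).
+                    max_staleness=duration_pb2.Duration(seconds=15),
+                    return_read_timestamp=True,
+                ),
+            )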
+ """ + + strong: bool = proto.Field( + proto.BOOL, + number=1, + oneof='timestamp_bound', + ) + min_read_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof='timestamp_bound', + message=timestamp_pb2.Timestamp, + ) + max_staleness: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + oneof='timestamp_bound', + message=duration_pb2.Duration, + ) + read_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + oneof='timestamp_bound', + message=timestamp_pb2.Timestamp, + ) + exact_staleness: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=5, + oneof='timestamp_bound', + message=duration_pb2.Duration, + ) + return_read_timestamp: bool = proto.Field( + proto.BOOL, + number=6, + ) + + read_write: ReadWrite = proto.Field( + proto.MESSAGE, + number=1, + oneof='mode', + message=ReadWrite, + ) + partitioned_dml: PartitionedDml = proto.Field( + proto.MESSAGE, + number=3, + oneof='mode', + message=PartitionedDml, + ) + read_only: ReadOnly = proto.Field( + proto.MESSAGE, + number=2, + oneof='mode', + message=ReadOnly, + ) + exclude_txn_from_change_streams: bool = proto.Field( + proto.BOOL, + number=5, + ) + isolation_level: IsolationLevel = proto.Field( + proto.ENUM, + number=6, + enum=IsolationLevel, + ) + + +class Transaction(proto.Message): + r"""A transaction. + + Attributes: + id (bytes): + ``id`` may be used to identify the transaction in subsequent + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], + [Commit][google.spanner.v1.Spanner.Commit], or + [Rollback][google.spanner.v1.Spanner.Rollback] calls. + + Single-use read-only transactions do not have IDs, because + single-use transactions do not support multiple requests. + read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + For snapshot read-only transactions, the read timestamp + chosen for the transaction. Not returned by default: see + [TransactionOptions.ReadOnly.return_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.return_read_timestamp]. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + A precommit token is included in the response of a + BeginTransaction request if the read-write transaction is on + a multiplexed session and a mutation_key was specified in + the + [BeginTransaction][google.spanner.v1.BeginTransactionRequest]. + The precommit token with the highest sequence number from + this transaction attempt should be passed to the + [Commit][google.spanner.v1.Spanner.Commit] request for this + transaction. + """ + + id: bytes = proto.Field( + proto.BYTES, + number=1, + ) + read_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + precommit_token: 'MultiplexedSessionPrecommitToken' = proto.Field( + proto.MESSAGE, + number=3, + message='MultiplexedSessionPrecommitToken', + ) + + +class TransactionSelector(proto.Message): + r"""This message is used to select the transaction in which a + [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] call runs. + + See [TransactionOptions][google.spanner.v1.TransactionOptions] for + more information about transactions. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + single_use (google.cloud.spanner_v1.types.TransactionOptions): + Execute the read or SQL query in a temporary + transaction. This is the most efficient way to + execute a transaction that consists of a single + SQL query. + + This field is a member of `oneof`_ ``selector``. + id (bytes): + Execute the read or SQL query in a + previously-started transaction. + + This field is a member of `oneof`_ ``selector``. + begin (google.cloud.spanner_v1.types.TransactionOptions): + Begin a new transaction and execute this read or SQL query + in it. The transaction ID of the new transaction is returned + in + [ResultSetMetadata.transaction][google.spanner.v1.ResultSetMetadata.transaction], + which is a [Transaction][google.spanner.v1.Transaction]. + + This field is a member of `oneof`_ ``selector``. + """ + + single_use: 'TransactionOptions' = proto.Field( + proto.MESSAGE, + number=1, + oneof='selector', + message='TransactionOptions', + ) + id: bytes = proto.Field( + proto.BYTES, + number=2, + oneof='selector', + ) + begin: 'TransactionOptions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='selector', + message='TransactionOptions', + ) + + +class MultiplexedSessionPrecommitToken(proto.Message): + r"""When a read-write transaction is executed on a multiplexed session, + this precommit token is sent back to the client as a part of the + [Transaction][google.spanner.v1.Transaction] message in the + [BeginTransaction][google.spanner.v1.BeginTransactionRequest] + response and also as a part of the + [ResultSet][google.spanner.v1.ResultSet] and + [PartialResultSet][google.spanner.v1.PartialResultSet] responses. + + Attributes: + precommit_token (bytes): + Opaque precommit token. + seq_num (int): + An incrementing seq number is generated on + every precommit token that is returned. Clients + should remember the precommit token with the + highest sequence number from the current + transaction attempt. + """ + + precommit_token: bytes = proto.Field( + proto.BYTES, + number=1, + ) + seq_num: int = proto.Field( + proto.INT32, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py new file mode 100644 index 0000000000..5b6365599e --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
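A ``TransactionOptions`` message picks exactly one mode (``read_write``, ``read_only``, or ``partitioned_dml``) plus an optional ``isolation_level``, and a ``TransactionSelector`` then wraps those options for an individual Read or ExecuteSql call. As a minimal sketch of how a client might assemble them with the proto-plus types generated above (the staleness value is illustrative, not a default):

    from google.protobuf import duration_pb2

    from google.cloud.spanner_v1 import TransactionOptions, TransactionSelector

    # Read-write transaction at REPEATABLE_READ; per the enum docs above,
    # locking semantics then default to OPTIMISTIC.
    rw_opts = TransactionOptions(
        read_write=TransactionOptions.ReadWrite(),
        isolation_level=TransactionOptions.IsolationLevel.REPEATABLE_READ,
    )

    # Strong read-only snapshot that also reports the chosen read timestamp
    # in the returned Transaction message.
    ro_opts = TransactionOptions(
        read_only=TransactionOptions.ReadOnly(
            strong=True,
            return_read_timestamp=True,
        ),
    )

    # Bounded-staleness variant (single-use only): data at most 15s stale.
    stale_opts = TransactionOptions(
        read_only=TransactionOptions.ReadOnly(
            max_staleness=duration_pb2.Duration(seconds=15),
        ),
    )

    # Run one read/query in a temporary transaction; assigning one member of
    # the `selector` oneof automatically clears the others (`id`, `begin`).
    selector = TransactionSelector(single_use=stale_opts)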
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'TypeCode', + 'TypeAnnotationCode', + 'Type', + 'StructType', + }, +) + + +class TypeCode(proto.Enum): + r"""``TypeCode`` is used as part of [Type][google.spanner.v1.Type] to + indicate the type of a Cloud Spanner value. + + Each legal value of a type can be encoded to or decoded from a JSON + value, using the encodings described below. All Cloud Spanner values + can be ``null``, regardless of type; ``null``\ s are always encoded + as a JSON ``null``. + + Values: + TYPE_CODE_UNSPECIFIED (0): + Not specified. + BOOL (1): + Encoded as JSON ``true`` or ``false``. + INT64 (2): + Encoded as ``string``, in decimal format. + FLOAT64 (3): + Encoded as ``number``, or the strings ``"NaN"``, + ``"Infinity"``, or ``"-Infinity"``. + FLOAT32 (15): + Encoded as ``number``, or the strings ``"NaN"``, + ``"Infinity"``, or ``"-Infinity"``. + TIMESTAMP (4): + Encoded as ``string`` in RFC 3339 timestamp format. The time + zone must be present, and must be ``"Z"``. + + If the schema has the column option + ``allow_commit_timestamp=true``, the placeholder string + ``"spanner.commit_timestamp()"`` can be used to instruct the + system to insert the commit timestamp associated with the + transaction commit. + DATE (5): + Encoded as ``string`` in RFC 3339 date format. + STRING (6): + Encoded as ``string``. + BYTES (7): + Encoded as a base64-encoded ``string``, as described in RFC + 4648, section 4. + ARRAY (8): + Encoded as ``list``, where the list elements are represented + according to + [array_element_type][google.spanner.v1.Type.array_element_type]. + STRUCT (9): + Encoded as ``list``, where list element ``i`` is represented + according to + [struct_type.fields[i]][google.spanner.v1.StructType.fields]. + NUMERIC (10): + Encoded as ``string``, in decimal format or scientific + notation format. Decimal format: ``[+-]Digits[.[Digits]]`` + or ``[+-][Digits].Digits`` + + Scientific notation: + ``[+-]Digits[.[Digits]][ExponentIndicator[+-]Digits]`` or + ``[+-][Digits].Digits[ExponentIndicator[+-]Digits]`` + (ExponentIndicator is ``"e"`` or ``"E"``) + JSON (11): + Encoded as a JSON-formatted ``string`` as described in RFC + 7159. The following rules are applied when parsing JSON + input: + + - Whitespace characters are not preserved. + - If a JSON object has duplicate keys, only the first key is + preserved. + - Members of a JSON object are not guaranteed to have their + order preserved. + - JSON array elements will have their order preserved. + PROTO (13): + Encoded as a base64-encoded ``string``, as described in RFC + 4648, section 4. + ENUM (14): + Encoded as ``string``, in decimal format. + INTERVAL (16): + Encoded as ``string``, in ``ISO8601`` duration format: + ``P[n]Y[n]M[n]DT[n]H[n]M[n[.fraction]]S`` where ``n`` is an + integer. For example, ``P1Y2M3DT4H5M6.5S`` represents a time + duration of 1 year, 2 months, 3 days, 4 hours, 5 minutes, + and 6.5 seconds. + UUID (17): + Encoded as ``string``, in lowercase hexadecimal format, as + described in RFC 9562, section 4.
+ """ + TYPE_CODE_UNSPECIFIED = 0 + BOOL = 1 + INT64 = 2 + FLOAT64 = 3 + FLOAT32 = 15 + TIMESTAMP = 4 + DATE = 5 + STRING = 6 + BYTES = 7 + ARRAY = 8 + STRUCT = 9 + NUMERIC = 10 + JSON = 11 + PROTO = 13 + ENUM = 14 + INTERVAL = 16 + UUID = 17 + + +class TypeAnnotationCode(proto.Enum): + r"""``TypeAnnotationCode`` is used as a part of + [Type][google.spanner.v1.Type] to disambiguate SQL types that should + be used for a given Cloud Spanner value. Disambiguation is needed + because the same Cloud Spanner type can be mapped to different SQL + types depending on SQL dialect. TypeAnnotationCode doesn't affect + the way value is serialized. + + Values: + TYPE_ANNOTATION_CODE_UNSPECIFIED (0): + Not specified. + PG_NUMERIC (2): + PostgreSQL compatible NUMERIC type. This annotation needs to + be applied to [Type][google.spanner.v1.Type] instances + having [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] type + code to specify that values of this type should be treated + as PostgreSQL NUMERIC values. Currently this annotation is + always needed for + [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] when a client + interacts with PostgreSQL-enabled Spanner databases. + PG_JSONB (3): + PostgreSQL compatible JSONB type. This annotation needs to + be applied to [Type][google.spanner.v1.Type] instances + having [JSON][google.spanner.v1.TypeCode.JSON] type code to + specify that values of this type should be treated as + PostgreSQL JSONB values. Currently this annotation is always + needed for [JSON][google.spanner.v1.TypeCode.JSON] when a + client interacts with PostgreSQL-enabled Spanner databases. + PG_OID (4): + PostgreSQL compatible OID type. This + annotation can be used by a client interacting + with PostgreSQL-enabled Spanner database to + specify that a value should be treated using the + semantics of the OID type. + """ + TYPE_ANNOTATION_CODE_UNSPECIFIED = 0 + PG_NUMERIC = 2 + PG_JSONB = 3 + PG_OID = 4 + + +class Type(proto.Message): + r"""``Type`` indicates the type of a Cloud Spanner value, as might be + stored in a table cell or returned from an SQL query. + + Attributes: + code (google.cloud.spanner_v1.types.TypeCode): + Required. The [TypeCode][google.spanner.v1.TypeCode] for + this type. + array_element_type (google.cloud.spanner_v1.types.Type): + If [code][google.spanner.v1.Type.code] == + [ARRAY][google.spanner.v1.TypeCode.ARRAY], then + ``array_element_type`` is the type of the array elements. + struct_type (google.cloud.spanner_v1.types.StructType): + If [code][google.spanner.v1.Type.code] == + [STRUCT][google.spanner.v1.TypeCode.STRUCT], then + ``struct_type`` provides type information for the struct's + fields. + type_annotation (google.cloud.spanner_v1.types.TypeAnnotationCode): + The + [TypeAnnotationCode][google.spanner.v1.TypeAnnotationCode] + that disambiguates SQL type that Spanner will use to + represent values of this type during query processing. This + is necessary for some type codes because a single + [TypeCode][google.spanner.v1.TypeCode] can be mapped to + different SQL types depending on the SQL dialect. + [type_annotation][google.spanner.v1.Type.type_annotation] + typically is not needed to process the content of a value + (it doesn't affect serialization) and clients can ignore it + on the read path. 
+ proto_type_fqn (str): + If [code][google.spanner.v1.Type.code] == + [PROTO][google.spanner.v1.TypeCode.PROTO] or + [code][google.spanner.v1.Type.code] == + [ENUM][google.spanner.v1.TypeCode.ENUM], then + ``proto_type_fqn`` is the fully qualified name of the proto + type representing the proto/enum definition. + """ + + code: 'TypeCode' = proto.Field( + proto.ENUM, + number=1, + enum='TypeCode', + ) + array_element_type: 'Type' = proto.Field( + proto.MESSAGE, + number=2, + message='Type', + ) + struct_type: 'StructType' = proto.Field( + proto.MESSAGE, + number=3, + message='StructType', + ) + type_annotation: 'TypeAnnotationCode' = proto.Field( + proto.ENUM, + number=4, + enum='TypeAnnotationCode', + ) + proto_type_fqn: str = proto.Field( + proto.STRING, + number=5, + ) + + +class StructType(proto.Message): + r"""``StructType`` defines the fields of a + [STRUCT][google.spanner.v1.TypeCode.STRUCT] type. + + Attributes: + fields (MutableSequence[google.cloud.spanner_v1.types.StructType.Field]): + The list of fields that make up this struct. Order is + significant, because values of this struct type are + represented as lists, where the order of field values + matches the order of fields in the + [StructType][google.spanner.v1.StructType]. In turn, the + order of fields matches the order of columns in a read + request, or the order of fields in the ``SELECT`` clause of + a query. + """ + + class Field(proto.Message): + r"""Message representing a single field of a struct. + + Attributes: + name (str): + The name of the field. For reads, this is the column name. + For SQL queries, it is the column alias (e.g., ``"Word"`` in + the query ``"SELECT 'hello' AS Word"``), or the column name + (e.g., ``"ColName"`` in the query + ``"SELECT ColName FROM Table"``). Some columns might have an + empty name (e.g., ``"SELECT UPPER(ColName)"``). Note that a + query result can contain multiple fields with the same name. + type_ (google.cloud.spanner_v1.types.Type): + The type of the field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'Type' = proto.Field( + proto.MESSAGE, + number=2, + message='Type', + ) + + fields: MutableSequence[Field] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Field, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/mypy.ini b/owl-bot-staging/spanner/v1/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/spanner/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/spanner/v1/noxfile.py b/owl-bot-staging/spanner/v1/noxfile.py new file mode 100644 index 0000000000..594241417c --- /dev/null +++ b/owl-bot-staging/spanner/v1/noxfile.py @@ -0,0 +1,601 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
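Because ``Type`` nests recursively, composite column types are expressed by embedding one ``Type`` inside another, and a ``StructType`` row is a list of named fields. A short sketch using the messages generated above (the field names are hypothetical; note that ``Field`` exposes ``type_`` with a trailing underscore because ``type`` collides with the Python builtin):

    from google.cloud.spanner_v1 import StructType, Type, TypeAnnotationCode, TypeCode

    # ARRAY<STRING>: the element type lives in `array_element_type`.
    array_of_string = Type(
        code=TypeCode.ARRAY,
        array_element_type=Type(code=TypeCode.STRING),
    )

    # STRUCT<Word STRING, Score NUMERIC>.
    row_type = StructType(
        fields=[
            StructType.Field(name="Word", type_=Type(code=TypeCode.STRING)),
            StructType.Field(
                name="Score",
                type_=Type(
                    code=TypeCode.NUMERIC,
                    # Per the enum docs above, PG_NUMERIC is required when the
                    # target database uses the PostgreSQL dialect.
                    type_annotation=TypeAnnotationCode.PG_NUMERIC,
                ),
            ),
        ],
    )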
+# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] + +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-spanner" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. 
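+    # PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION (passed via `env=` below) selects
+    # the protobuf runtime backend: "python" (pure Python), "upb" (the default
+    # C backend in protobuf 4.x), or "cpp" (the legacy backend which, as noted
+    # above, requires protobuf<4). Parametrizing the session over all three
+    # exercises the generated code against each backend.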
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=PREVIEW_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
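+    # Each entry below is a PEP 508 direct reference ("name @ git+<repo URL>"),
+    # with "#egg=...&subdirectory=..." fragments for packages that live inside
+    # a monorepo subdirectory.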
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json new file mode 100644 index 0000000000..f7f33c3d29 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -0,0 +1,2579 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.spanner.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-spanner", + "version": "0.0.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_create_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BatchCreateSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "session_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", + "shortName": "batch_create_sessions" + }, + "description": "Sample for BatchCreateSessions", + "file": "spanner_v1_generated_spanner_batch_create_sessions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_create_sessions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.batch_create_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + 
}, + "shortName": "BatchCreateSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "session_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", + "shortName": "batch_create_sessions" + }, + "description": "Sample for BatchCreateSessions", + "file": "spanner_v1_generated_spanner_batch_create_sessions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_create_sessions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_write", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchWrite", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BatchWrite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BatchWriteRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "mutation_groups", + "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", + "shortName": "batch_write" + }, + "description": "Sample for BatchWrite", + "file": "spanner_v1_generated_spanner_batch_write_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchWrite_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_write_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.batch_write", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchWrite", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BatchWrite" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_v1.types.BatchWriteRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "mutation_groups", + "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", + "shortName": "batch_write" + }, + "description": "Sample for BatchWrite", + "file": "spanner_v1_generated_spanner_batch_write_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchWrite_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_write_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.begin_transaction", + "method": { + "fullName": "google.spanner.v1.Spanner.BeginTransaction", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BeginTransaction" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "options", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Transaction", + "shortName": "begin_transaction" + }, + "description": "Sample for BeginTransaction", + "file": "spanner_v1_generated_spanner_begin_transaction_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_begin_transaction_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.begin_transaction", + "method": { + "fullName": "google.spanner.v1.Spanner.BeginTransaction", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BeginTransaction" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" + }, + { + "name": "session", + "type": "str" + }, 
+ { + "name": "options", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Transaction", + "shortName": "begin_transaction" + }, + "description": "Sample for BeginTransaction", + "file": "spanner_v1_generated_spanner_begin_transaction_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_begin_transaction_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.commit", + "method": { + "fullName": "google.spanner.v1.Spanner.Commit", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Commit" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CommitRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "mutations", + "type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" + }, + { + "name": "single_use_transaction", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.CommitResponse", + "shortName": "commit" + }, + "description": "Sample for Commit", + "file": "spanner_v1_generated_spanner_commit_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Commit_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_commit_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.commit", + "method": { + "fullName": "google.spanner.v1.Spanner.Commit", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Commit" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CommitRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "mutations", + 
"type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" + }, + { + "name": "single_use_transaction", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.CommitResponse", + "shortName": "commit" + }, + "description": "Sample for Commit", + "file": "spanner_v1_generated_spanner_commit_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Commit_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_commit_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.create_session", + "method": { + "fullName": "google.spanner.v1.Spanner.CreateSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "CreateSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CreateSessionRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Session", + "shortName": "create_session" + }, + "description": "Sample for CreateSession", + "file": "spanner_v1_generated_spanner_create_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_CreateSession_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_create_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.create_session", + "method": { + "fullName": "google.spanner.v1.Spanner.CreateSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "CreateSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CreateSessionRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.spanner_v1.types.Session", + "shortName": "create_session" + }, + "description": "Sample for CreateSession", + "file": "spanner_v1_generated_spanner_create_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_CreateSession_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_create_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.delete_session", + "method": { + "fullName": "google.spanner.v1.Spanner.DeleteSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "DeleteSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_session" + }, + "description": "Sample for DeleteSession", + "file": "spanner_v1_generated_spanner_delete_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_DeleteSession_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_delete_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.delete_session", + "method": { + "fullName": "google.spanner.v1.Spanner.DeleteSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "DeleteSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_session" + }, + "description": "Sample for DeleteSession", + "file": "spanner_v1_generated_spanner_delete_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_DeleteSession_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_delete_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_batch_dml", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteBatchDml" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", + "shortName": "execute_batch_dml" + }, + "description": "Sample for ExecuteBatchDml", + "file": "spanner_v1_generated_spanner_execute_batch_dml_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_batch_dml_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.execute_batch_dml", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteBatchDml" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", + "shortName": "execute_batch_dml" + }, + "description": "Sample for ExecuteBatchDml", + "file": "spanner_v1_generated_spanner_execute_batch_dml_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_batch_dml_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + 
"shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "spanner_v1_generated_spanner_execute_sql_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_sql_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.execute_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "spanner_v1_generated_spanner_execute_sql_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_sql_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_streaming_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteStreamingSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "execute_streaming_sql" + }, + "description": "Sample for ExecuteStreamingSql", + "file": "spanner_v1_generated_spanner_execute_streaming_sql_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_streaming_sql_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.execute_streaming_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteStreamingSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "execute_streaming_sql" + }, + "description": "Sample for ExecuteStreamingSql", + "file": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.get_session", + "method": { + "fullName": "google.spanner.v1.Spanner.GetSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "GetSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.GetSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Session", + "shortName": "get_session" + }, + "description": "Sample for GetSession", + "file": 
"spanner_v1_generated_spanner_get_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_GetSession_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_get_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.get_session", + "method": { + "fullName": "google.spanner.v1.Spanner.GetSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "GetSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.GetSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Session", + "shortName": "get_session" + }, + "description": "Sample for GetSession", + "file": "spanner_v1_generated_spanner_get_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_GetSession_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_get_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.list_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.ListSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ListSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager", + "shortName": "list_sessions" + }, + "description": "Sample for ListSessions", + "file": "spanner_v1_generated_spanner_list_sessions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ListSessions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_list_sessions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.list_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.ListSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ListSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager", + "shortName": "list_sessions" + }, + "description": "Sample for ListSessions", + "file": "spanner_v1_generated_spanner_list_sessions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ListSessions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_list_sessions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_query", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionQuery", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_query" + }, + "description": "Sample for PartitionQuery", + "file": "spanner_v1_generated_spanner_partition_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.partition_query", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionQuery", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_query" + }, + "description": "Sample for PartitionQuery", + "file": "spanner_v1_generated_spanner_partition_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_read", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionRead" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.PartitionReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_read" + }, + "description": "Sample for PartitionRead", + "file": "spanner_v1_generated_spanner_partition_read_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionRead_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_read_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.partition_read", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionRead" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_v1.types.PartitionReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_read" + }, + "description": "Sample for PartitionRead", + "file": "spanner_v1_generated_spanner_partition_read_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionRead_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_read_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.read", + "method": { + "fullName": "google.spanner.v1.Spanner.Read", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Read" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "read" + }, + "description": "Sample for Read", + "file": "spanner_v1_generated_spanner_read_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Read_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_read_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.read", + "method": { + "fullName": "google.spanner.v1.Spanner.Read", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Read" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "read" + }, + "description": "Sample for Read", + "file": "spanner_v1_generated_spanner_read_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Read_sync", + "segments": [ + { + 
"end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_read_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.rollback", + "method": { + "fullName": "google.spanner.v1.Spanner.Rollback", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Rollback" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.RollbackRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "rollback" + }, + "description": "Sample for Rollback", + "file": "spanner_v1_generated_spanner_rollback_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Rollback_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_rollback_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.rollback", + "method": { + "fullName": "google.spanner.v1.Spanner.Rollback", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Rollback" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.RollbackRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "rollback" + }, + "description": "Sample for Rollback", + "file": "spanner_v1_generated_spanner_rollback_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Rollback_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_rollback_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.streaming_read", + "method": { + "fullName": "google.spanner.v1.Spanner.StreamingRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "StreamingRead" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "streaming_read" + }, + "description": "Sample for StreamingRead", + "file": "spanner_v1_generated_spanner_streaming_read_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_StreamingRead_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_streaming_read_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.streaming_read", + "method": { + "fullName": "google.spanner.v1.Spanner.StreamingRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "StreamingRead" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "streaming_read" + }, + "description": "Sample for StreamingRead", + "file": "spanner_v1_generated_spanner_streaming_read_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_StreamingRead_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_streaming_read_sync.py" + } + ] +} diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py new file mode 100644 index 0000000000..49e64b4ab8 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchCreateSessions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchCreateSessions_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_batch_create_sessions():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BatchCreateSessionsRequest(
+        database="database_value",
+        session_count=1420,
+    )
+
+    # Make the request
+    response = await client.batch_create_sessions(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BatchCreateSessions_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py
new file mode 100644
index 0000000000..ade1da3661
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchCreateSessions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchCreateSessions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_batch_create_sessions():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BatchCreateSessionsRequest(
+        database="database_value",
+        session_count=1420,
+    )
+
+    # Make the request
+    response = client.batch_create_sessions(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BatchCreateSessions_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py
new file mode 100644
index 0000000000..d1565657e8
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchWrite
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchWrite_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_batch_write():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    mutation_groups = spanner_v1.MutationGroup()
+    mutation_groups.mutations.insert.table = "table_value"
+
+    request = spanner_v1.BatchWriteRequest(
+        session="session_value",
+        mutation_groups=mutation_groups,
+    )
+
+    # Make the request
+    stream = await client.batch_write(request=request)
+
+    # Handle the response
+    async for response in stream:
+        print(response)
+
+# [END spanner_v1_generated_Spanner_BatchWrite_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
new file mode 100644
index 0000000000..9b6621def9
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchWrite
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchWrite_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_batch_write():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    mutation_groups = spanner_v1.MutationGroup()
+    mutation_groups.mutations.insert.table = "table_value"
+
+    request = spanner_v1.BatchWriteRequest(
+        session="session_value",
+        mutation_groups=mutation_groups,
+    )
+
+    # Make the request
+    stream = client.batch_write(request=request)
+
+    # Handle the response
+    for response in stream:
+        print(response)
+
+# [END spanner_v1_generated_Spanner_BatchWrite_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
new file mode 100644
index 0000000000..efdd161715
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BeginTransaction
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BeginTransaction_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_begin_transaction():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BeginTransactionRequest(
+        session="session_value",
+    )
+
+    # Make the request
+    response = await client.begin_transaction(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BeginTransaction_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
new file mode 100644
index 0000000000..764dab8aa2
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BeginTransaction
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BeginTransaction_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_begin_transaction():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BeginTransactionRequest(
+        session="session_value",
+    )
+
+    # Make the request
+    response = client.begin_transaction(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BeginTransaction_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
new file mode 100644
index 0000000000..f61c297d38
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for Commit
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_Commit_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_commit():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.CommitRequest(
+        transaction_id=b'transaction_id_blob',
+        session="session_value",
+    )
+
+    # Make the request
+    response = await client.commit(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_Commit_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
new file mode 100644
index 0000000000..a945bd2234
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for Commit
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_Commit_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_commit():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.CommitRequest(
+        transaction_id=b'transaction_id_blob',
+        session="session_value",
+    )
+
+    # Make the request
+    response = client.commit(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_Commit_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py
new file mode 100644
index 0000000000..8cddc00c66
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateSession
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_CreateSession_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_create_session():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.CreateSessionRequest(
+        database="database_value",
+    )
+
+    # Make the request
+    response = await client.create_session(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_CreateSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py
new file mode 100644
index 0000000000..b9de2d34e0
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateSession
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_CreateSession_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_create_session():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.CreateSessionRequest(
+        database="database_value",
+    )
+
+    # Make the request
+    response = client.create_session(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_CreateSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py
new file mode 100644
index 0000000000..9fed1ddca6
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteSession
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_DeleteSession_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_delete_session():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.DeleteSessionRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    await client.delete_session(request=request)
+
+
+# [END spanner_v1_generated_Spanner_DeleteSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py
new file mode 100644
index 0000000000..1f2a17e2d1
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteSession
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_DeleteSession_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_delete_session():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.DeleteSessionRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    client.delete_session(request=request)
+
+
+# [END spanner_v1_generated_Spanner_DeleteSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py
new file mode 100644
index 0000000000..8313fd66a0
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ExecuteBatchDml
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_ExecuteBatchDml_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_execute_batch_dml():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    statements = spanner_v1.Statement()
+    statements.sql = "sql_value"
+
+    request = spanner_v1.ExecuteBatchDmlRequest(
+        session="session_value",
+        statements=statements,
+        seqno=550,
+    )
+
+    # Make the request
+    response = await client.execute_batch_dml(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_ExecuteBatchDml_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py
new file mode 100644
index 0000000000..dd4696b6b2
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ExecuteBatchDml
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_ExecuteBatchDml_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_execute_batch_dml():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    statements = spanner_v1.Statement()
+    statements.sql = "sql_value"
+
+    request = spanner_v1.ExecuteBatchDmlRequest(
+        session="session_value",
+        statements=statements,
+        seqno=550,
+    )
+
+    # Make the request
+    response = client.execute_batch_dml(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_ExecuteBatchDml_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py
new file mode 100644
index 0000000000..a12b20f3e9
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ExecuteSql
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_ExecuteSql_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_execute_sql():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.ExecuteSqlRequest(
+        session="session_value",
+        sql="sql_value",
+    )
+
+    # Make the request
+    response = await client.execute_sql(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_ExecuteSql_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py
new file mode 100644
index 0000000000..761d0ca251
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ExecuteSql
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_ExecuteSql_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_execute_sql():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.ExecuteSqlRequest(
+        session="session_value",
+        sql="sql_value",
+    )
+
+    # Make the request
+    response = client.execute_sql(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_ExecuteSql_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py
new file mode 100644
index 0000000000..86b8eb910e
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteStreamingSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_execute_streaming_sql(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + stream = await client.execute_streaming_sql(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py new file mode 100644 index 0000000000..dc7dba43b8 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteStreamingSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_execute_streaming_sql(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + stream = client.execute_streaming_sql(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py new file mode 100644 index 0000000000..d2e50f9891 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_GetSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_get_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_GetSession_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py new file mode 100644 index 0000000000..36d6436b04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_GetSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_get_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_session(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_GetSession_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py new file mode 100644 index 0000000000..95aa4bf818 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ListSessions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_list_sessions(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ListSessionsRequest( + database="database_value", + ) + + # Make the request (the async paginated call is a coroutine, so it must be awaited before iterating) + page_result = await client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END spanner_v1_generated_Spanner_ListSessions_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py new file mode 100644 index 0000000000..a9533fed0d --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ListSessions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_list_sessions(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ListSessionsRequest( + database="database_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_Spanner_ListSessions_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py new file mode 100644 index 0000000000..200fb2f6a2 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_partition_query(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionQueryRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = await client.partition_query(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionQuery_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py new file mode 100644 index 0000000000..d486a3590c --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_partition_query(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionQueryRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = client.partition_query(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionQuery_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py new file mode 100644 index 0000000000..99055ade8b --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionRead_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = await client.partition_read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionRead_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py new file mode 100644 index 0000000000..0ca01ac423 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionRead_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = client.partition_read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionRead_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py new file mode 100644 index 0000000000..e555865245 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Read +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Read_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + response = await client.read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_Read_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py new file mode 100644 index 0000000000..8f9ee621f3 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Read +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Read_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + response = client.read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_Read_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py new file mode 100644 index 0000000000..f99a1b8dd8 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rollback +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Rollback_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_rollback(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + await client.rollback(request=request) + + +# [END spanner_v1_generated_Spanner_Rollback_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py new file mode 100644 index 0000000000..00b23b21fc --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rollback +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Rollback_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_rollback(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + client.rollback(request=request) + + +# [END spanner_v1_generated_Spanner_Rollback_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py new file mode 100644 index 0000000000..f79b9a96a1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_StreamingRead_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = await client.streaming_read(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_StreamingRead_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py new file mode 100644 index 0000000000..f81ed34b33 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_StreamingRead_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = client.streaming_read(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_StreamingRead_sync] diff --git a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py b/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py new file mode 100644 index 0000000000..c7f41be11e --- /dev/null +++ b/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
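+# +# As a hypothetical illustration (this example call is not part of the generated script), a positional call such as +#     client.execute_sql("session_value", "SELECT 1") +# is rewritten by this script into the keyword 'request' form: +#     client.execute_sql(request={'session': "session_value", 'sql': "SELECT 1"})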
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class spannerCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_sessions': ('database', 'session_count', 'session_template', ), + 'batch_write': ('session', 'mutation_groups', 'request_options', 'exclude_txn_from_change_streams', ), + 'begin_transaction': ('session', 'options', 'request_options', 'mutation_key', ), + 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'max_commit_delay', 'request_options', 'precommit_token', ), + 'create_session': ('database', 'session', ), + 'delete_session': ('name', ), + 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', 'last_statements', ), + 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), + 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), + 'get_session': ('name', ), + 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), + 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), + 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), + 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), + 'rollback': ('session', 'transaction_id', ), + 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
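+            # For instance (illustrative), client.get_session(request={'name': 'name_value'}) is +            # already in the target form and is returned as-is.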
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + # Any positional arguments past the method's named parameters can only be the + # control parameters, in CTRL_PARAMS order. + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=spannerCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the spanner client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool can also produce false + positives when an API method shares a name with another method.
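+ +As an illustrative invocation (the directory names here are placeholders, not generated output): + +    python3 fixup_spanner_v1_keywords.py --input-directory .samples/ --output-directory samples/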
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner/v1/setup.py b/owl-bot-staging/spanner/v1/setup.py new file mode 100644 index 0000000000..41d728098c --- /dev/null +++ b/owl-bot-staging/spanner/v1/setup.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-spanner' + + +description = "Google Cloud Spanner API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/spanner/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + 
author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt new file mode 100644 index 0000000000..1e93c60e50 --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt @@ -0,0 +1,12 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt new file mode 100644 index 0000000000..1e93c60e50 --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt @@ -0,0 +1,12 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. 
+# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000000..5d29dea386 --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt @@ -0,0 +1,12 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/tests/__init__.py b/owl-bot-staging/spanner/v1/tests/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py new file mode 100644 index 0000000000..b90788ab0f --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py @@ -0,0 +1,11446 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient +from google.cloud.spanner_v1.services.spanner import SpannerClient +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.services.spanner import transports +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import keys +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SpannerClient._get_default_mtls_endpoint(None) is None + assert SpannerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert SpannerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SpannerClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SpannerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + SpannerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SpannerClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SpannerClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SpannerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SpannerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SpannerClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SpannerClient._get_client_cert_source(None, False) is None + assert SpannerClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert SpannerClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert SpannerClient._get_client_cert_source(None, True) is mock_default_cert_source + assert 
SpannerClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SpannerClient._DEFAULT_UNIVERSE + default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert SpannerClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == SpannerClient.DEFAULT_MTLS_ENDPOINT + assert SpannerClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert SpannerClient._get_api_endpoint(None, None, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT + assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT + assert SpannerClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert SpannerClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + SpannerClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert SpannerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert SpannerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert SpannerClient._get_universe_domain(None, None) == SpannerClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + SpannerClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
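+
+# A minimal sketch of the resolution behavior pinned down above, assuming a
+# clean environment (no GOOGLE_API_USE_* or GOOGLE_CLOUD_UNIVERSE_DOMAIN
+# overrides); the helper name is illustrative, not part of the generated suite:
+def example_default_endpoint_resolution():
+    client = SpannerClient(credentials=ga_credentials.AnonymousCredentials())
+    # With no overrides, the client lands on the default universe, and the
+    # endpoint is the default template populated with that universe domain.
+    assert client.universe_domain == SpannerClient._DEFAULT_UNIVERSE
+    assert client.api_endpoint == SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=SpannerClient._DEFAULT_UNIVERSE
+    )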
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SpannerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SpannerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (SpannerClient, "grpc"), + (SpannerAsyncClient, "grpc_asyncio"), + (SpannerClient, "rest"), +]) +def test_spanner_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SpannerGrpcTransport, "grpc"), + (transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SpannerRestTransport, "rest"), +]) +def test_spanner_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SpannerClient, "grpc"), + (SpannerAsyncClient, "grpc_asyncio"), + (SpannerClient, "rest"), +]) +def test_spanner_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +def test_spanner_client_get_transport_class(): + transport = SpannerClient.get_transport_class() + available_transports = [ + transports.SpannerGrpcTransport, + transports.SpannerRestTransport, + ] + assert transport in available_transports + + transport = SpannerClient.get_transport_class("grpc") + assert transport == transports.SpannerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), + (SpannerClient, transports.SpannerRestTransport, "rest"), +]) +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +def test_spanner_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_audience is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc", "true"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (SpannerClient, transports.SpannerGrpcTransport, "grpc", "false"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (SpannerClient, transports.SpannerRestTransport, "rest", "true"), + (SpannerClient, transports.SpannerRestTransport, "rest", "false"), +]) +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def 
test_spanner_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
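+    # With neither a provided callback nor an ADC default cert available, the
+    # client must stay on the plain (non-mTLS) endpoint, even when
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".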
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + SpannerClient, SpannerAsyncClient +]) +@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) +def test_spanner_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
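+    # "auto" combined with a discoverable default cert is expected to switch
+    # the client to the mTLS endpoint and surface that cert source.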
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + SpannerClient, SpannerAsyncClient +]) +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +def test_spanner_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SpannerClient._DEFAULT_UNIVERSE + default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
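+    # ("always" forces the mTLS endpoint regardless of whether a client
+    # certificate is actually available.)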
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), + (SpannerClient, transports.SpannerRestTransport, "rest"), +]) +def test_spanner_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (SpannerClient, transports.SpannerRestTransport, "rest", None), +]) +def test_spanner_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
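+    # The filename is passed through to the transport untouched; nothing is
+    # read from disk here because the transport __init__ is mocked out.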
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_spanner_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = SpannerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_spanner_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
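+    # (i.e. credentials loaded from the file are expected to win over ADC
+    # when both are available).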
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + scopes=None, + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + spanner.CreateSessionRequest, + dict, +]) +def test_create_session(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + ) + response = client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.CreateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +def test_create_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.CreateSessionRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CreateSessionRequest( + database='database_value', + ) + +def test_create_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_session] = mock_rpc + request = {} + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_session in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_session] = mock_rpc + + request = {} + await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_session_async(transport: str = 'grpc_asyncio', request_type=spanner.CreateSessionRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + )) + response = await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.CreateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +@pytest.mark.asyncio +async def test_create_session_async_from_dict(): + await test_create_session_async(request_type=dict) + +def test_create_session_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CreateSessionRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = spanner.Session() + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_session_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CreateSessionRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) + await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_create_session_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_session( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + + +def test_create_session_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_session( + spanner.CreateSessionRequest(), + database='database_value', + ) + +@pytest.mark.asyncio +async def test_create_session_flattened_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_session( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_session_flattened_error_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_session( + spanner.CreateSessionRequest(), + database='database_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner.BatchCreateSessionsRequest, + dict, +]) +def test_batch_create_sessions(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.BatchCreateSessionsResponse( + ) + response = client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.BatchCreateSessionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.BatchCreateSessionsResponse) + + +def test_batch_create_sessions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.BatchCreateSessionsRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.batch_create_sessions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchCreateSessionsRequest( + database='database_value', + ) + +def test_batch_create_sessions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_create_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc + request = {} + client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_create_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_create_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_create_sessions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_create_sessions] = mock_rpc + + request = {} + await client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_create_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_create_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchCreateSessionsRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse( + )) + response = await client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.BatchCreateSessionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.BatchCreateSessionsResponse) + + +@pytest.mark.asyncio +async def test_batch_create_sessions_async_from_dict(): + await test_batch_create_sessions_async(request_type=dict) + +def test_batch_create_sessions_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchCreateSessionsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = spanner.BatchCreateSessionsResponse() + client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_create_sessions_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchCreateSessionsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) + await client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_batch_create_sessions_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.BatchCreateSessionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_create_sessions( + database='database_value', + session_count=1420, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].session_count + mock_val = 1420 + assert arg == mock_val + + +def test_batch_create_sessions_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.batch_create_sessions(
+ spanner.BatchCreateSessionsRequest(),
+ database='database_value',
+ session_count=1420,
+ )
+
+@pytest.mark.asyncio
+async def test_batch_create_sessions_flattened_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_create_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.batch_create_sessions(
+ database='database_value',
+ session_count=1420,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = 'database_value'
+ assert arg == mock_val
+ arg = args[0].session_count
+ mock_val = 1420
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_batch_create_sessions_flattened_error_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.batch_create_sessions(
+ spanner.BatchCreateSessionsRequest(),
+ database='database_value',
+ session_count=1420,
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner.GetSessionRequest,
+ dict,
+])
+def test_get_session(request_type, transport: str = 'grpc'):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_session),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner.Session(
+ name='name_value',
+ creator_role='creator_role_value',
+ multiplexed=True,
+ )
+ response = client.get_session(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner.GetSessionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner.Session)
+ assert response.name == 'name_value'
+ assert response.creator_role == 'creator_role_value'
+ assert response.multiplexed is True
+
+
+def test_get_session_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
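+ # (The echo assertion that follows checks two things at once: every field
+ # set explicitly here must reach the stub unchanged, and any UUID4-eligible
+ # field left unset is assumed to be auto-populated by the client before
+ # the stub sees the request.)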
+ request = spanner.GetSessionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.GetSessionRequest( + name='name_value', + ) + +def test_get_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_session] = mock_rpc + request = {} + client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_session in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_session] = mock_rpc + + request = {} + await client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_session_async(transport: str = 'grpc_asyncio', request_type=spanner.GetSessionRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. 
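+ # (FakeUnaryUnaryCall is the google.api_core.grpc_helpers_async test
+ # helper that wraps a plain response proto so the async client can await
+ # it like a real unary-unary gRPC call.)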
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session(
+ name='name_value',
+ creator_role='creator_role_value',
+ multiplexed=True,
+ ))
+ response = await client.get_session(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner.GetSessionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner.Session)
+ assert response.name == 'name_value'
+ assert response.creator_role == 'creator_role_value'
+ assert response.multiplexed is True
+
+
+@pytest.mark.asyncio
+async def test_get_session_async_from_dict():
+ await test_get_session_async(request_type=dict)
+
+def test_get_session_field_headers():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.GetSessionRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_session),
+ '__call__') as call:
+ call.return_value = spanner.Session()
+ client.get_session(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_session_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.GetSessionRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_session),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
+ await client.get_session(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_get_session_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_session),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner.Session()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_session(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
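+ # (Each flattened keyword argument should have been copied verbatim onto
+ # the generated request proto before the transport was invoked, which is
+ # what the field-by-field checks that follow verify.)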
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_session_flattened_error():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_session(
+ spanner.GetSessionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_session_flattened_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_session),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_session(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_session_flattened_error_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_session(
+ spanner.GetSessionRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner.ListSessionsRequest,
+ dict,
+])
+def test_list_sessions(request_type, transport: str = 'grpc'):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner.ListSessionsResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner.ListSessionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListSessionsPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_sessions_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner.ListSessionsRequest( + database='database_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_sessions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ListSessionsRequest( + database='database_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_sessions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_sessions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc + + request = {} + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.ListSessionsRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner.ListSessionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListSessionsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_async_from_dict():
+ await test_list_sessions_async(request_type=dict)
+
+def test_list_sessions_field_headers():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.ListSessionsRequest()
+
+ request.database = 'database_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ call.return_value = spanner.ListSessionsResponse()
+ client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'database=database_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.ListSessionsRequest()
+
+ request.database = 'database_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
+ await client.list_sessions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'database=database_value',
+ ) in kw['metadata']
+
+
+def test_list_sessions_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner.ListSessionsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_sessions(
+ database='database_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = 'database_value'
+ assert arg == mock_val
+
+
+def test_list_sessions_flattened_error():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_sessions(
+ spanner.ListSessionsRequest(),
+ database='database_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_sessions(
+ database='database_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = 'database_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_error_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_sessions(
+ spanner.ListSessionsRequest(),
+ database='database_value',
+ )
+
+
+def test_list_sessions_pager(transport_name: str = "grpc"):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ spanner.ListSessionsResponse(
+ sessions=[
+ spanner.Session(),
+ spanner.Session(),
+ spanner.Session(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner.ListSessionsResponse(
+ sessions=[],
+ next_page_token='def',
+ ),
+ spanner.ListSessionsResponse(
+ sessions=[
+ spanner.Session(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner.ListSessionsResponse(
+ sessions=[
+ spanner.Session(),
+ spanner.Session(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('database', ''),
+ )),
+ )
+ pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, spanner.Session)
+ for i in results)
+
+
+def test_list_sessions_pages(transport_name: str = "grpc"):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_sessions),
+ '__call__') as call:
+ # Set the response to a series of pages.
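+ # (With a tuple side_effect, each successive stub invocation returns the
+ # next response; the trailing RuntimeError is a tripwire that fails the
+ # test if the pager ever requests more pages than were supplied.)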
+ call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sessions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_sessions_async_pager(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sessions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner.Session) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_sessions_async_pages(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sessions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner.DeleteSessionRequest, + dict, +]) +def test_delete_session(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
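+ # (DeleteSession returns google.protobuf.Empty on the wire, which the
+ # generated client surfaces as None; hence the stub below is faked with a
+ # bare None rather than a response proto.)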
+ with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.DeleteSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.DeleteSessionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.DeleteSessionRequest( + name='name_value', + ) + +def test_delete_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc + request = {} + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. 
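+ # (The contract being exercised: the wrapped rpc was built once at client
+ # construction and is looked up from _wrapped_methods on every call, so two
+ # invocations must bump the mock's count without wrap_method running again.)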
+ assert mock_rpc.call_count == 1 + + client.delete_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_session in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_session] = mock_rpc + + request = {} + await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_session_async(transport: str = 'grpc_asyncio', request_type=spanner.DeleteSessionRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.DeleteSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_session_async_from_dict(): + await test_delete_session_async(request_type=dict) + +def test_delete_session_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.DeleteSessionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value = None + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
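+ # (Routing headers travel as gRPC metadata: the recorded call's keyword
+ # arguments are unpacked and searched for the x-goog-request-params entry
+ # derived from the request's resource name.)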
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_session_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.DeleteSessionRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_session),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_session(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_session_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_session),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_session(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_session_flattened_error():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_session(
+ spanner.DeleteSessionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_session_flattened_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_session),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_session(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_session_flattened_error_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_session( + spanner.DeleteSessionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteSqlRequest, + dict, +]) +def test_execute_sql(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = result_set.ResultSet( + ) + response = client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteSqlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +def test_execute_sql_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.execute_sql(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + +def test_execute_sql_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc + request = {} + client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.execute_sql(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_execute_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.execute_sql in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.execute_sql] = mock_rpc
+
+ request = {}
+ await client.execute_sql(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.execute_sql(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_execute_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest):
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.execute_sql),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet())
+ response = await client.execute_sql(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner.ExecuteSqlRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.asyncio
+async def test_execute_sql_async_from_dict():
+ await test_execute_sql_async(request_type=dict)
+
+def test_execute_sql_field_headers():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.ExecuteSqlRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.execute_sql),
+ '__call__') as call:
+ call.return_value = result_set.ResultSet()
+ client.execute_sql(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_execute_sql_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) + await client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteSqlRequest, + dict, +]) +def test_execute_streaming_sql(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([result_set.PartialResultSet()]) + response = client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteSqlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, result_set.PartialResultSet) + + +def test_execute_streaming_sql_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.execute_streaming_sql(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + +def test_execute_streaming_sql_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_streaming_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc + request = {} + client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.execute_streaming_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.execute_streaming_sql in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.execute_streaming_sql] = mock_rpc + + request = {} + await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.execute_streaming_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. 
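+ # (Server streaming over grpc_asyncio is faked with a UnaryStreamCall mock
+ # whose read() coroutine returns one PartialResultSet per configured side
+ # effect, mirroring how the real call object is consumed.)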
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + response = await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteSqlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, result_set.PartialResultSet) + + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async_from_dict(): + await test_execute_streaming_sql_async(request_type=dict) + +def test_execute_streaming_sql_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_execute_streaming_sql_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteBatchDmlRequest, + dict, +]) +def test_execute_batch_dml(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.ExecuteBatchDmlResponse( + ) + response = client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteBatchDmlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.ExecuteBatchDmlResponse) + + +def test_execute_batch_dml_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ExecuteBatchDmlRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.execute_batch_dml(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteBatchDmlRequest( + session='session_value', + ) + +def test_execute_batch_dml_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_batch_dml in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc + request = {} + client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.execute_batch_dml(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_execute_batch_dml_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.execute_batch_dml in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.execute_batch_dml] = mock_rpc + + request = {} + await client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.execute_batch_dml(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_execute_batch_dml_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteBatchDmlRequest):
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.execute_batch_dml),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse())
+ response = await client.execute_batch_dml(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner.ExecuteBatchDmlRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner.ExecuteBatchDmlResponse)
+
+
+@pytest.mark.asyncio
+async def test_execute_batch_dml_async_from_dict():
+ await test_execute_batch_dml_async(request_type=dict)
+
+def test_execute_batch_dml_field_headers():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.ExecuteBatchDmlRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.execute_batch_dml),
+ '__call__') as call:
+ call.return_value = spanner.ExecuteBatchDmlResponse()
+ client.execute_batch_dml(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'session=session_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_execute_batch_dml_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.ExecuteBatchDmlRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.execute_batch_dml),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse())
+ await client.execute_batch_dml(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ReadRequest, + dict, +]) +def test_read(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = result_set.ResultSet( + ) + response = client.read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +def test_read_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.read(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + +def test_read_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.read] = mock_rpc + request = {} + client.read(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.read(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = SpannerAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.read in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.read] = mock_rpc
+
+        request = {}
+        await client.read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.read(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.read),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
+        ))
+        response = await client.read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.ReadRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.asyncio
+async def test_read_async_from_dict():
+    await test_read_async(request_type=dict)
+
+def test_read_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.ReadRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.read),
+            '__call__') as call:
+        call.return_value = result_set.ResultSet()
+        client.read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_read_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) + await client.read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ReadRequest, + dict, +]) +def test_streaming_read(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([result_set.PartialResultSet()]) + response = client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, result_set.PartialResultSet) + + +def test_streaming_read_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.streaming_read(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + +def test_streaming_read_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.streaming_read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc + request = {} + client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.streaming_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_streaming_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.streaming_read in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.streaming_read] = mock_rpc + + request = {} + await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.streaming_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_streaming_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + response = await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. 
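+        # (For a server-streaming RPC the mocked call object exposes an
+        # async read() that yields the PartialResultSet designated above.)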
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.ReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, result_set.PartialResultSet) + + +@pytest.mark.asyncio +async def test_streaming_read_async_from_dict(): + await test_streaming_read_async(request_type=dict) + +def test_streaming_read_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_streaming_read_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.BeginTransactionRequest, + dict, +]) +def test_begin_transaction(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = transaction.Transaction( + id=b'id_blob', + ) + response = client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.BeginTransactionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
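+    # (The id designated on the mocked Transaction above should
+    # round-trip unchanged.)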
+ assert isinstance(response, transaction.Transaction) + assert response.id == b'id_blob' + + +def test_begin_transaction_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.BeginTransactionRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.begin_transaction(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BeginTransactionRequest( + session='session_value', + ) + +def test_begin_transaction_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc + request = {} + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_begin_transaction_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.begin_transaction in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.begin_transaction] = mock_rpc + + request = {} + await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.begin_transaction(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=spanner.BeginTransactionRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction(
+            id=b'id_blob',
+        ))
+        response = await client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.BeginTransactionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, transaction.Transaction)
+    assert response.id == b'id_blob'
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_async_from_dict():
+    await test_begin_transaction_async(request_type=dict)
+
+def test_begin_transaction_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.BeginTransactionRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        call.return_value = transaction.Transaction()
+        client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'session=session_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_field_headers_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.BeginTransactionRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction())
+        await client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'session=session_value',
+    ) in kw['metadata']
+
+
+def test_begin_transaction_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = transaction.Transaction()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.begin_transaction(
+            session='session_value',
+            options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].session
+        mock_val = 'session_value'
+        assert arg == mock_val
+        arg = args[0].options
+        mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
+        assert arg == mock_val
+
+
+def test_begin_transaction_flattened_error():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.begin_transaction(
+            spanner.BeginTransactionRequest(),
+            session='session_value',
+            options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+@pytest.mark.asyncio
+async def test_begin_transaction_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.begin_transaction(
+            session='session_value',
+            options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].session
+        mock_val = 'session_value'
+        assert arg == mock_val
+        arg = args[0].options
+        mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_begin_transaction_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.begin_transaction( + spanner.BeginTransactionRequest(), + session='session_value', + options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner.CommitRequest, + dict, +]) +def test_commit(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = commit_response.CommitResponse( + ) + response = client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.CommitRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, commit_response.CommitResponse) + + +def test_commit_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.CommitRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.commit(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CommitRequest( + session='session_value', + ) + +def test_commit_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.commit(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = SpannerAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.commit in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.commit] = mock_rpc
+
+        request = {}
+        await client.commit(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.commit(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_commit_async(transport: str = 'grpc_asyncio', request_type=spanner.CommitRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.commit),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse(
+        ))
+        response = await client.commit(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.CommitRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, commit_response.CommitResponse)
+
+
+@pytest.mark.asyncio
+async def test_commit_async_from_dict():
+    await test_commit_async(request_type=dict)
+
+def test_commit_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.CommitRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.commit),
+            '__call__') as call:
+        call.return_value = commit_response.CommitResponse()
+        client.commit(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_commit_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CommitRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) + await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +def test_commit_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = commit_response.CommitResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.commit( + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = 'session_value' + assert arg == mock_val + arg = args[0].mutations + mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] + assert arg == mock_val + assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)) + + +def test_commit_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + spanner.CommitRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + +@pytest.mark.asyncio +async def test_commit_flattened_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.commit(
+            session='session_value',
+            transaction_id=b'transaction_id_blob',
+            mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))],
+            single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].session
+        mock_val = 'session_value'
+        assert arg == mock_val
+        arg = args[0].mutations
+        mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))]
+        assert arg == mock_val
+        assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
+
+@pytest.mark.asyncio
+async def test_commit_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.commit(
+            spanner.CommitRequest(),
+            session='session_value',
+            transaction_id=b'transaction_id_blob',
+            mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))],
+            single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.RollbackRequest,
+    dict,
+])
+def test_rollback(request_type, transport: str = 'grpc'):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.rollback),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.rollback(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner.RollbackRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_rollback_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner.RollbackRequest(
+        session='session_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.rollback(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.RollbackRequest( + session='session_value', + ) + +def test_rollback_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.rollback in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.rollback] = mock_rpc + + request = {} + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=spanner.RollbackRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
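+        # (Rollback maps to google.protobuf.Empty, which the generated
+        # client surfaces as None.)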
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.RollbackRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_rollback_async_from_dict(): + await test_rollback_async(request_type=dict) + +def test_rollback_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.RollbackRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = None + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rollback_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.RollbackRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +def test_rollback_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rollback( + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = 'session_value' + assert arg == mock_val + arg = args[0].transaction_id + mock_val = b'transaction_id_blob' + assert arg == mock_val + + +def test_rollback_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.rollback(
+            spanner.RollbackRequest(),
+            session='session_value',
+            transaction_id=b'transaction_id_blob',
+        )
+
+@pytest.mark.asyncio
+async def test_rollback_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.rollback),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.rollback(
+            session='session_value',
+            transaction_id=b'transaction_id_blob',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].session
+        mock_val = 'session_value'
+        assert arg == mock_val
+        arg = args[0].transaction_id
+        mock_val = b'transaction_id_blob'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_rollback_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.rollback(
+            spanner.RollbackRequest(),
+            session='session_value',
+            transaction_id=b'transaction_id_blob',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.PartitionQueryRequest,
+    dict,
+])
+def test_partition_query(request_type, transport: str = 'grpc'):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.partition_query),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner.PartitionResponse(
+        )
+        response = client.partition_query(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner.PartitionQueryRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.PartitionResponse)
+
+
+def test_partition_query_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner.PartitionQueryRequest(
+        session='session_value',
+        sql='sql_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.partition_query),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.partition_query(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == spanner.PartitionQueryRequest(
+            session='session_value',
+            sql='sql_value',
+        )
+
+def test_partition_query_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.partition_query in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc
+        request = {}
+        client.partition_query(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.partition_query(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_partition_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = SpannerAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.partition_query in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.partition_query] = mock_rpc
+
+        request = {}
+        await client.partition_query(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.partition_query(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionQueryRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.partition_query),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse(
+        ))
+        response = await client.partition_query(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.PartitionQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.PartitionResponse) + + +@pytest.mark.asyncio +async def test_partition_query_async_from_dict(): + await test_partition_query_async(request_type=dict) + +def test_partition_query_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionQueryRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_partition_query_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionQueryRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) + await client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.PartitionReadRequest, + dict, +]) +def test_partition_read(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.PartitionResponse( + ) + response = client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.PartitionReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
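+    # (PartitionRead shares the PartitionResponse message with
+    # PartitionQuery.)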
+ assert isinstance(response, spanner.PartitionResponse) + + +def test_partition_read_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.PartitionReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.partition_read(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + +def test_partition_read_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.partition_read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc + request = {} + client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.partition_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_partition_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.partition_read in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.partition_read] = mock_rpc + + request = {} + await client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. 
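+ # The first call above went through the mocked cached wrapper; the second call below must reuse it without re-wrapping.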
+ assert mock_rpc.call_count == 1
+
+ await client.partition_read(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_partition_read_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionReadRequest):
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_read),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse())
+ response = await client.partition_read(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner.PartitionReadRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner.PartitionResponse)
+
+
+@pytest.mark.asyncio
+async def test_partition_read_async_from_dict():
+ await test_partition_read_async(request_type=dict)
+
+def test_partition_read_field_headers():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.PartitionReadRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_read),
+ '__call__') as call:
+ call.return_value = spanner.PartitionResponse()
+ client.partition_read(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'session=session_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_partition_read_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.PartitionReadRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_read),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse())
+ await client.partition_read(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
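+ # The routing header is passed as call metadata, so it shows up in the stub call's kwargs under 'metadata'.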
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.BatchWriteRequest, + dict, +]) +def test_batch_write(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([spanner.BatchWriteResponse()]) + response = client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.BatchWriteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, spanner.BatchWriteResponse) + + +def test_batch_write_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.BatchWriteRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.batch_write(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchWriteRequest( + session='session_value', + ) + +def test_batch_write_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_write in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc + request = {} + client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_write(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_write_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_write in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_write] = mock_rpc + + request = {} + await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_write(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_write_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchWriteRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()]) + response = await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.BatchWriteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, spanner.BatchWriteResponse) + + +@pytest.mark.asyncio +async def test_batch_write_async_from_dict(): + await test_batch_write_async(request_type=dict) + +def test_batch_write_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchWriteRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value = iter([spanner.BatchWriteResponse()]) + client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'session=session_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_batch_write_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.BatchWriteRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_write),
+ '__call__') as call:
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+ call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()])
+ await client.batch_write(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'session=session_value',
+ ) in kw['metadata']
+
+
+def test_batch_write_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_write),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iter([spanner.BatchWriteResponse()])
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.batch_write(
+ session='session_value',
+ mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].session
+ mock_val = 'session_value'
+ assert arg == mock_val
+ arg = args[0].mutation_groups
+ mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])]
+ assert arg == mock_val
+
+
+def test_batch_write_flattened_error():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.batch_write(
+ spanner.BatchWriteRequest(),
+ session='session_value',
+ mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
+ )
+
+@pytest.mark.asyncio
+async def test_batch_write_flattened_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_write),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
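+ # The client is expected to assemble these keyword arguments into a single BatchWriteRequest before invoking the transport.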
+ response = await client.batch_write( + session='session_value', + mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = 'session_value' + assert arg == mock_val + arg = args[0].mutation_groups + mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_batch_write_flattened_error_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_write( + spanner.BatchWriteRequest(), + session='session_value', + mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], + ) + + +def test_create_session_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_session] = mock_rpc + + request = {} + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. 
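+ # _wrapped_methods maps each transport method to its wrapped (retry/timeout-aware) callable, so swapping in a Mock lets the test count invocations without any HTTP traffic.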
+ assert mock_rpc.call_count == 1
+
+ client.create_session(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+def test_create_session_rest_required_fields(request_type=spanner.CreateSessionRequest):
+ transport_class = transports.SpannerRestTransport
+
+ request_init = {}
+ request_init["database"] = ""
+ request = request_type(**request_init)
+ pb_request = request_type.pb(request)
+ jsonified_request = json.loads(json_format.MessageToJson(
+ pb_request,
+ use_integers_for_enums=False
+ ))
+
+ # verify fields with default values are dropped
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with default values are now present
+
+ jsonified_request["database"] = 'database_value'
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "database" in jsonified_request
+ assert jsonified_request["database"] == 'database_value'
+
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest',
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = spanner.Session()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = spanner.Session.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.create_session(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_create_session_rest_unset_required_fields():
+ transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.create_session._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("database", "session", )))
+
+
+def test_create_session_rest_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = spanner.Session() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_session(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1]) + + +def test_create_session_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_session( + spanner.CreateSessionRequest(), + database='database_value', + ) + + +def test_batch_create_sessions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_create_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc + + request = {} + client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_create_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_create_sessions_rest_required_fields(request_type=spanner.BatchCreateSessionsRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["database"] = "" + request_init["session_count"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + jsonified_request["sessionCount"] = 1420 + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + assert "sessionCount" in jsonified_request + assert jsonified_request["sessionCount"] == 1420 + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.BatchCreateSessionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
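+ # transcode() would normally match the request against the http rule's URI template; stubbing it keeps this test independent of the real http_options.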
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = spanner.BatchCreateSessionsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.batch_create_sessions(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_batch_create_sessions_rest_unset_required_fields():
+ transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.batch_create_sessions._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("database", "sessionCount", )))
+
+
+def test_batch_create_sessions_rest_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = spanner.BatchCreateSessionsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ database='database_value',
+ session_count=1420,
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = spanner.BatchCreateSessionsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.batch_create_sessions(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate" % client.transport._host, args[1])
+
+
+def test_batch_create_sessions_rest_flattened_error(transport: str = 'rest'):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + client.batch_create_sessions( + spanner.BatchCreateSessionsRequest(), + database='database_value', + session_count=1420, + ) + + +def test_get_session_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_session] = mock_rpc + + request = {} + client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_session_rest_required_fields(request_type=spanner.GetSessionRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.Session() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = spanner.Session.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.get_session(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_get_session_rest_unset_required_fields():
+ transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.get_session._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_session_rest_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = spanner.Session()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ name='name_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = spanner.Session.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.get_session(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1])
+
+
+def test_get_session_rest_flattened_error(transport: str = 'rest'):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + client.get_session( + spanner.GetSessionRequest(), + name='name_value', + ) + + +def test_list_sessions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sessions_rest_required_fields(request_type=spanner.ListSessionsRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.ListSessionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = spanner.ListSessionsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_sessions(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_sessions_rest_unset_required_fields():
+ transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list_sessions._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("database", )))
+
+
+def test_list_sessions_rest_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = spanner.ListSessionsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ database='database_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = spanner.ListSessionsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_sessions(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1])
+
+
+def test_list_sessions_rest_flattened_error(transport: str = 'rest'):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_sessions(
+ spanner.ListSessionsRequest(),
+ database='database_value',
+ )
+
+
+def test_list_sessions_rest_pager(transport: str = 'rest'):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
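+ # Each page fetch issues one HTTP request; the pager follows next_page_token until it comes back empty.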
+ # Set the response as a series of pages
+ response = (
+ spanner.ListSessionsResponse(
+ sessions=[
+ spanner.Session(),
+ spanner.Session(),
+ spanner.Session(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner.ListSessionsResponse(
+ sessions=[],
+ next_page_token='def',
+ ),
+ spanner.ListSessionsResponse(
+ sessions=[
+ spanner.Session(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner.ListSessionsResponse(
+ sessions=[
+ spanner.Session(),
+ spanner.Session(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(spanner.ListSessionsResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ pager = client.list_sessions(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, spanner.Session)
+ for i in results)
+
+ pages = list(client.list_sessions(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_delete_session_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_session in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc
+
+ request = {}
+ client.delete_session(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_session(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+def test_delete_session_rest_required_fields(request_type=spanner.DeleteSessionRequest):
+ transport_class = transports.SpannerRestTransport
+
+ request_init = {}
+ request_init["name"] = ""
+ request = request_type(**request_init)
+ pb_request = request_type.pb(request)
+ jsonified_request = json.loads(json_format.MessageToJson(
+ pb_request,
+ use_integers_for_enums=False
+ ))
+
+ # verify fields with default values are dropped
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with default values are now present
+
+ jsonified_request["name"] = 'name_value'
+
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request)
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "name" in jsonified_request
+ assert jsonified_request["name"] == 'name_value'
+
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest',
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = None
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, 'transcode') as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "delete",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ''
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.delete_session(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_delete_session_rest_unset_required_fields():
+ transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.delete_session._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_delete_session_rest_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
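+ # DeleteSession returns google.protobuf.Empty, so the faked HTTP body below is an empty string and the client surfaces None.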
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_session(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1]) + + +def test_delete_session_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_session( + spanner.DeleteSessionRequest(), + name='name_value', + ) + + +def test_execute_sql_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc + + request = {} + client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.execute_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["sql"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["sql"] = 'sql_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "sql" in jsonified_request + assert jsonified_request["sql"] == 'sql_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.ResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "post",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = result_set.ResultSet.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.execute_sql(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_execute_sql_rest_unset_required_fields():
+ transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.execute_sql._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("session", "sql", )))
+
+
+def test_execute_streaming_sql_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.execute_streaming_sql in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc
+
+ request = {}
+ client.execute_streaming_sql(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.execute_streaming_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_streaming_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["sql"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["sql"] = 'sql_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "sql" in jsonified_request + assert jsonified_request["sql"] == 'sql_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.PartialResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
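+ # Server streaming over REST arrives as a JSON array of messages; the test below wraps a single PartialResultSet in "[...]" and feeds it through iter_content.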
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = result_set.PartialResultSet.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+            json_return_value = "[{}]".format(json_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            with mock.patch.object(response_value, 'iter_content') as iter_content:
+                iter_content.return_value = iter(json_return_value)
+                response = client.execute_streaming_sql(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_execute_streaming_sql_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.execute_streaming_sql._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "sql", )))
+
+
+def test_execute_batch_dml_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.execute_batch_dml in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc
+
+        request = {}
+        client.execute_batch_dml(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.execute_batch_dml(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_batch_dml_rest_required_fields(request_type=spanner.ExecuteBatchDmlRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["seqno"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["seqno"] = 550 + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "seqno" in jsonified_request + assert jsonified_request["seqno"] == 550 + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.ExecuteBatchDmlResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner.ExecuteBatchDmlResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.execute_batch_dml(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_execute_batch_dml_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.execute_batch_dml._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "transaction", "statements", "seqno", )))
+
+
+def test_read_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.read in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.read] = mock_rpc
+
+        request = {}
+        client.read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_read_rest_required_fields(request_type=spanner.ReadRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["table"] = "" + request_init["columns"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["table"] = 'table_value' + jsonified_request["columns"] = 'columns_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "table" in jsonified_request + assert jsonified_request["table"] == 'table_value' + assert "columns" in jsonified_request + assert jsonified_request["columns"] == 'columns_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.ResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = result_set.ResultSet.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.read(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_read_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.read._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", )))
+
+
+def test_streaming_read_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.streaming_read in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc
+
+        request = {}
+        client.streaming_read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.streaming_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_streaming_read_rest_required_fields(request_type=spanner.ReadRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["table"] = "" + request_init["columns"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["table"] = 'table_value' + jsonified_request["columns"] = 'columns_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "table" in jsonified_request + assert jsonified_request["table"] == 'table_value' + assert "columns" in jsonified_request + assert jsonified_request["columns"] == 'columns_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.PartialResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
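+            # streaming_read is a server-streaming method, so over REST the
+            # faked payload below is wrapped in a JSON array ("[{...}]") and
+            # served through a mocked iter_content, roughly mimicking how the
+            # rest transport consumes a chunked streaming response.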
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = result_set.PartialResultSet.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+            json_return_value = "[{}]".format(json_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            with mock.patch.object(response_value, 'iter_content') as iter_content:
+                iter_content.return_value = iter(json_return_value)
+                response = client.streaming_read(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_streaming_read_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.streaming_read._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", )))
+
+
+def test_begin_transaction_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.begin_transaction in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc
+
+        request = {}
+        client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.begin_transaction(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_begin_transaction_rest_required_fields(request_type=spanner.BeginTransactionRequest):
+    transport_class = transports.SpannerRestTransport
+
+    request_init = {}
+    request_init["session"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["session"] = 'session_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "session" in jsonified_request
+    assert jsonified_request["session"] == 'session_value'
+
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = transaction.Transaction()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = transaction.Transaction.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.begin_transaction(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_begin_transaction_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.begin_transaction._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "options", )))
+
+
+def test_begin_transaction_rest_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
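+    # The flattened-call test below passes keyword arguments rather than a
+    # request object; the client is expected to fold them into a
+    # BeginTransactionRequest and hit the :beginTransaction http rule, which
+    # path_template.validate checks against the URI actually requested.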
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = transaction.Transaction() + + # get arguments that satisfy an http rule for this method + sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + session='session_value', + options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = transaction.Transaction.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.begin_transaction(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction" % client.transport._host, args[1]) + + +def test_begin_transaction_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.begin_transaction( + spanner.BeginTransactionRequest(), + session='session_value', + options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + +def test_commit_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.commit(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_commit_rest_required_fields(request_type=spanner.CommitRequest):
+    transport_class = transports.SpannerRestTransport
+
+    request_init = {}
+    request_init["session"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["session"] = 'session_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "session" in jsonified_request
+    assert jsonified_request["session"] == 'session_value'
+
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = commit_response.CommitResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = commit_response.CommitResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.commit(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_commit_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.commit._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", )))
+
+
+def test_commit_rest_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = commit_response.CommitResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + session='session_value', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = commit_response.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.commit(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit" % client.transport._host, args[1]) + + +def test_commit_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + spanner.CommitRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + +def test_rollback_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.rollback(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_rollback_rest_required_fields(request_type=spanner.RollbackRequest):
+    transport_class = transports.SpannerRestTransport
+
+    request_init = {}
+    request_init["session"] = ""
+    request_init["transaction_id"] = b''
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["session"] = 'session_value'
+    jsonified_request["transactionId"] = b'transaction_id_blob'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "session" in jsonified_request
+    assert jsonified_request["session"] == 'session_value'
+    assert "transactionId" in jsonified_request
+    assert jsonified_request["transactionId"] == b'transaction_id_blob'
+
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ''
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.rollback(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_rollback_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.rollback._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "transactionId", )))
+
+
+def test_rollback_rest_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + session='session_value', + transaction_id=b'transaction_id_blob', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.rollback(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback" % client.transport._host, args[1]) + + +def test_rollback_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback( + spanner.RollbackRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + +def test_partition_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.partition_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc + + request = {} + client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.partition_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_partition_query_rest_required_fields(request_type=spanner.PartitionQueryRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["sql"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["sql"] = 'sql_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "sql" in jsonified_request + assert jsonified_request["sql"] == 'sql_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.PartitionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner.PartitionResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.partition_query(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_partition_query_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.partition_query._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "sql", )))
+
+
+def test_partition_read_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.partition_read in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc
+
+        request = {}
+        client.partition_read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.partition_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_partition_read_rest_required_fields(request_type=spanner.PartitionReadRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["table"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["table"] = 'table_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "table" in jsonified_request + assert jsonified_request["table"] == 'table_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.PartitionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner.PartitionResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.partition_read(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_partition_read_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.partition_read._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "table", "keySet", )))
+
+
+def test_batch_write_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.batch_write in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc
+
+        request = {}
+        client.batch_write(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.batch_write(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_write_rest_required_fields(request_type=spanner.BatchWriteRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.BatchWriteResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner.BatchWriteResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+            json_return_value = "[{}]".format(json_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            with mock.patch.object(response_value, 'iter_content') as iter_content:
+                iter_content.return_value = iter(json_return_value)
+                response = client.batch_write(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_batch_write_rest_unset_required_fields():
+    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.batch_write._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("session", "mutationGroups", )))
+
+
+def test_batch_write_rest_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.BatchWriteResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            session='session_value',
+            mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner.BatchWriteResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        json_return_value = "[{}]".format(json_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        with mock.patch.object(response_value, 'iter_content') as iter_content:
+            iter_content.return_value = iter(json_return_value)
+            client.batch_write(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite" % client.transport._host, args[1])
+
+
+def test_batch_write_rest_flattened_error(transport: str = 'rest'):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.batch_write( + spanner.BatchWriteRequest(), + session='session_value', + mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpannerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpannerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SpannerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SpannerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.SpannerGrpcTransport, + transports.SpannerGrpcAsyncIOTransport, + transports.SpannerRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = SpannerClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
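+# Calling with request=None must still construct and send the default (empty)
+# request message; each of these tests asserts that the stub received exactly
+# that default message.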
+def test_create_session_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = spanner.Session() + client.create_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CreateSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_create_sessions_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = spanner.BatchCreateSessionsResponse() + client.batch_create_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchCreateSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_session_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + call.return_value = spanner.Session() + client.get_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.GetSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sessions_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value = spanner.ListSessionsResponse() + client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ListSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_session_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value = None + client.delete_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.DeleteSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_execute_sql_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value = result_set.ResultSet() + client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_streaming_sql_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.execute_streaming_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_batch_dml_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + call.return_value = spanner.ExecuteBatchDmlResponse() + client.execute_batch_dml(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteBatchDmlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_read_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value = result_set.ResultSet() + client.read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_streaming_read_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.streaming_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_begin_transaction_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value = transaction.Transaction() + client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = commit_response.CommitResponse() + client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = None + client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_query_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_read_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
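+# batch_write is a server-streaming RPC, so the sync gRPC mock below returns a
+# plain iterator of BatchWriteResponse messages instead of a single message.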
+def test_batch_write_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value = iter([spanner.BatchWriteResponse()]) + client.batch_write(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchWriteRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SpannerAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_session_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + )) + await client.create_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CreateSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_create_sessions_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse( + )) + await client.batch_create_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchCreateSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_session_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. 
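+        # The async transport awaits the stub, so the canned response is
+        # wrapped in grpc_helpers_async.FakeUnaryUnaryCall, which behaves like
+        # an awaitable grpc.aio unary-unary call resolving to the message.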
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + )) + await client.get_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.GetSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sessions_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ListSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_session_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.DeleteSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_sql_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( + )) + await client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_streaming_sql_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. 
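+        # Server-streaming RPCs have no FakeUnaryUnaryCall equivalent here:
+        # the mock imitates a grpc.aio.UnaryStreamCall whose read() coroutine
+        # hands back a single PartialResultSet.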
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.execute_streaming_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_batch_dml_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse( + )) + await client.execute_batch_dml(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteBatchDmlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_read_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( + )) + await client.read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_streaming_read_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.streaming_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_begin_transaction_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction( + id=b'id_blob', + )) + await client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_commit_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse( + )) + await client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_rollback_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_partition_query_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( + )) + await client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_partition_read_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + # Designate an appropriate return value for the call. 
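+        # partition_read resolves to the same spanner.PartitionResponse as
+        # partition_query; both return partition tokens that would normally
+        # feed a later streaming_read or execute_streaming_sql call.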
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( + )) + await client.partition_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_write_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()]) + await client.batch_write(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchWriteRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SpannerClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_session_rest_bad_request(request_type=spanner.CreateSessionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_session(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.CreateSessionRequest, + dict, +]) +def test_create_session_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
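+        # REST tests fake the HTTP layer: the expected proto is serialized to
+        # JSON and attached to a mocked requests response, which the transport
+        # then parses back into a spanner.Session.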
+ return_value = spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_session(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_session_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_create_session") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_create_session_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_create_session") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.CreateSessionRequest.pb(spanner.CreateSessionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.Session.to_json(spanner.Session()) + req.return_value.content = return_value + + request = spanner.CreateSessionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.Session() + post_with_metadata.return_value = spanner.Session(), metadata + + client.create_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_create_sessions_rest_bad_request(request_type=spanner.BatchCreateSessionsRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
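+    # A mocked 400 status on the underlying requests.Session is enough for the
+    # transport to raise core_exceptions.BadRequest; no JSON payload is needed.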
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.batch_create_sessions(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.BatchCreateSessionsRequest,
+    dict,
+])
+def test_batch_create_sessions_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.BatchCreateSessionsResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.BatchCreateSessionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.batch_create_sessions(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.BatchCreateSessionsResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_batch_create_sessions_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions") as post, \
+         mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_create_sessions") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.BatchCreateSessionsRequest.pb(spanner.BatchCreateSessionsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner.BatchCreateSessionsResponse.to_json(spanner.BatchCreateSessionsResponse())
+        req.return_value.content = return_value
+
+        request = spanner.BatchCreateSessionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner.BatchCreateSessionsResponse()
+        post_with_metadata.return_value = spanner.BatchCreateSessionsResponse(), metadata
+
+        client.batch_create_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_session_rest_bad_request(request_type=spanner.GetSessionRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_session(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.GetSessionRequest,
+    dict,
+])
+def test_get_session_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.Session(
+            name='name_value',
+            creator_role='creator_role_value',
+            multiplexed=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.Session.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_session(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_session_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_get_session") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_get_session_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_get_session") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.GetSessionRequest.pb(spanner.GetSessionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.Session.to_json(spanner.Session()) + req.return_value.content = return_value + + request = spanner.GetSessionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.Session() + post_with_metadata.return_value = spanner.Session(), metadata + + client.get_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_sessions_rest_bad_request(request_type=spanner.ListSessionsRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_sessions(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.ListSessionsRequest, + dict, +]) +def test_list_sessions_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
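+        # list_sessions is a paginated RPC: the client wraps the raw
+        # ListSessionsResponse in a ListSessionsPager, and next_page_token is
+        # what drives any subsequent page fetches.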
+ return_value = spanner.ListSessionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_sessions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sessions_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_list_sessions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.ListSessionsRequest.pb(spanner.ListSessionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.ListSessionsResponse.to_json(spanner.ListSessionsResponse()) + req.return_value.content = return_value + + request = spanner.ListSessionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.ListSessionsResponse() + post_with_metadata.return_value = spanner.ListSessionsResponse(), metadata + + client.list_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_session_rest_bad_request(request_type=spanner.DeleteSessionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_session(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.DeleteSessionRequest, + dict, +]) +def test_delete_session_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_session(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_session_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_delete_session") as pre: + pre.assert_not_called() + pb_message = spanner.DeleteSessionRequest.pb(spanner.DeleteSessionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner.DeleteSessionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_execute_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.execute_sql(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ExecuteSqlRequest,
+    dict,
+])
+def test_execute_sql_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.ResultSet(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.ResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.execute_sql(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_execute_sql_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql") as post, \
+         mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_sql") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = result_set.ResultSet.to_json(result_set.ResultSet())
+        req.return_value.content = return_value
+
+        request = spanner.ExecuteSqlRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = result_set.ResultSet()
+        post_with_metadata.return_value = result_set.ResultSet(), metadata
+
+        client.execute_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_execute_streaming_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.execute_streaming_sql(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ExecuteSqlRequest,
+    dict,
+])
+def test_execute_streaming_sql_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.PartialResultSet(
+            chunked_value=True,
+            resume_token=b'resume_token_blob',
+            last=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.PartialResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        json_return_value = "[{}]".format(json_return_value)
+        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.execute_streaming_sql(request)
+
+    assert isinstance(response, Iterable)
+    response = next(response)
+
+    # Establish that the response is the type that we expect.
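+    # Streaming REST responses arrive as a JSON array consumed via
+    # iter_content, so the client returns an iterator; next() pulls the first
+    # (and here only) PartialResultSet, whose last=True marks end of stream.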
+ assert isinstance(response, result_set.PartialResultSet) + assert response.chunked_value is True + assert response.resume_token == b'resume_token_blob' + assert response.last is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_execute_streaming_sql_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_streaming_sql") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet()) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = spanner.ExecuteSqlRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = result_set.PartialResultSet() + post_with_metadata.return_value = result_set.PartialResultSet(), metadata + + client.execute_streaming_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_execute_batch_dml_rest_bad_request(request_type=spanner.ExecuteBatchDmlRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.execute_batch_dml(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteBatchDmlRequest, + dict, +]) +def test_execute_batch_dml_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner.ExecuteBatchDmlResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.execute_batch_dml(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.ExecuteBatchDmlResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_execute_batch_dml_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_batch_dml") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.ExecuteBatchDmlRequest.pb(spanner.ExecuteBatchDmlRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.ExecuteBatchDmlResponse.to_json(spanner.ExecuteBatchDmlResponse()) + req.return_value.content = return_value + + request = spanner.ExecuteBatchDmlRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.ExecuteBatchDmlResponse() + post_with_metadata.return_value = spanner.ExecuteBatchDmlResponse(), metadata + + client.execute_batch_dml(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_read_rest_bad_request(request_type=spanner.ReadRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.read(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ReadRequest,
+    dict,
+])
+def test_read_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.ResultSet(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.ResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.read(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_read_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.SpannerRestInterceptor, "post_read") as post, \
+         mock.patch.object(transports.SpannerRestInterceptor, "post_read_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.SpannerRestInterceptor, "pre_read") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.ReadRequest.pb(spanner.ReadRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = result_set.ResultSet.to_json(result_set.ResultSet())
+        req.return_value.content = return_value
+
+        request = spanner.ReadRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = result_set.ResultSet()
+        post_with_metadata.return_value = result_set.ResultSet(), metadata
+
+        client.read(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_streaming_read_rest_bad_request(request_type=spanner.ReadRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.streaming_read(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ReadRequest,
+    dict,
+])
+def test_streaming_read_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.PartialResultSet(
+            chunked_value=True,
+            resume_token=b'resume_token_blob',
+            last=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.PartialResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        json_return_value = "[{}]".format(json_return_value)
+        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.streaming_read(request)
+
+    assert isinstance(response, Iterable)
+    response = next(response)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, result_set.PartialResultSet) + assert response.chunked_value is True + assert response.resume_token == b'resume_token_blob' + assert response.last is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_streaming_read_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_streaming_read") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.ReadRequest.pb(spanner.ReadRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet()) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = spanner.ReadRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = result_set.PartialResultSet() + post_with_metadata.return_value = result_set.PartialResultSet(), metadata + + client.streaming_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_begin_transaction_rest_bad_request(request_type=spanner.BeginTransactionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.begin_transaction(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.BeginTransactionRequest, + dict, +]) +def test_begin_transaction_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
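+ # Patching the transport session's request method keeps the call in-process: + # no real HTTP traffic occurs, and the canned Transaction below is returned.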
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = transaction.Transaction( + id=b'id_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = transaction.Transaction.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.begin_transaction(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, transaction.Transaction) + assert response.id == b'id_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_begin_transaction_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_begin_transaction") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.BeginTransactionRequest.pb(spanner.BeginTransactionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = transaction.Transaction.to_json(transaction.Transaction()) + req.return_value.content = return_value + + request = spanner.BeginTransactionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = transaction.Transaction() + post_with_metadata.return_value = transaction.Transaction(), metadata + + client.begin_transaction(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_commit_rest_bad_request(request_type=spanner.CommitRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
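+ # google-api-core maps an HTTP 400 response to core_exceptions.BadRequest, + # which is exactly what pytest.raises expects here.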
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.commit(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.CommitRequest, + dict, +]) +def test_commit_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = commit_response.CommitResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = commit_response.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.commit(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, commit_response.CommitResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_commit_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_commit") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_commit_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_commit") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.CommitRequest.pb(spanner.CommitRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = commit_response.CommitResponse.to_json(commit_response.CommitResponse()) + req.return_value.content = return_value + + request = spanner.CommitRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = commit_response.CommitResponse() + post_with_metadata.return_value = commit_response.CommitResponse(), metadata + + client.commit(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + 
+def test_rollback_rest_bad_request(request_type=spanner.RollbackRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.rollback(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.RollbackRequest, + dict, +]) +def test_rollback_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.rollback(request) + + # Establish that the response is the type that we expect. 
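+ # Rollback maps to google.protobuf.Empty over the wire, which the generated + # client surfaces as None.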
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_rollback") as pre: + pre.assert_not_called() + pb_message = spanner.RollbackRequest.pb(spanner.RollbackRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner.RollbackRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.rollback(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_partition_query_rest_bad_request(request_type=spanner.PartitionQueryRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.partition_query(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.PartitionQueryRequest, + dict, +]) +def test_partition_query_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner.PartitionResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.partition_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner.PartitionResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_partition_query_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_query") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.PartitionQueryRequest.pb(spanner.PartitionQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse()) + req.return_value.content = return_value + + request = spanner.PartitionQueryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.PartitionResponse() + post_with_metadata.return_value = spanner.PartitionResponse(), metadata + + client.partition_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_partition_read_rest_bad_request(request_type=spanner.PartitionReadRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.partition_read(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.PartitionReadRequest, + dict, +]) +def test_partition_read_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
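+ # PartitionRead shares the PartitionResponse message with PartitionQuery, so + # the faked payload mirrors the partition_query test above.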
+ return_value = spanner.PartitionResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.partition_read(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.PartitionResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_partition_read_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_read") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.PartitionReadRequest.pb(spanner.PartitionReadRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse()) + req.return_value.content = return_value + + request = spanner.PartitionReadRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.PartitionResponse() + post_with_metadata.return_value = spanner.PartitionResponse(), metadata + + client.partition_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_write_rest_bad_request(request_type=spanner.BatchWriteRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
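+ # BatchWrite is a server-streaming RPC, but its error path is exercised the + # same way as the unary calls: the faked 400 should raise before iteration.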
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_write(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.BatchWriteRequest, + dict, +]) +def test_batch_write_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner.BatchWriteResponse( + indexes=[752], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_write(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. 
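+ # Only the first streamed BatchWriteResponse is inspected; next(response) + # above already pulled it from the iterator.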
+ assert isinstance(response, spanner.BatchWriteResponse) + assert response.indexes == [752] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_write_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_write") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.BatchWriteRequest.pb(spanner.BatchWriteRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.BatchWriteResponse.to_json(spanner.BatchWriteResponse()) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = spanner.BatchWriteRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.BatchWriteResponse() + post_with_metadata.return_value = spanner.BatchWriteResponse(), metadata + + client.batch_write(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +def test_initialize_client_w_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_session_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + client.create_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CreateSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_create_sessions_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + client.batch_create_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchCreateSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_session_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + client.get_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.GetSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sessions_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ListSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_session_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + client.delete_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.DeleteSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_sql_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_streaming_sql_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + client.execute_streaming_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_execute_batch_dml_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + client.execute_batch_dml(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteBatchDmlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_read_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + client.read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_streaming_read_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + client.streaming_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_begin_transaction_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_query_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_read_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + client.partition_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_write_empty_call_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + client.batch_write(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchWriteRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SpannerGrpcTransport, + ) + +def test_spanner_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SpannerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_spanner_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SpannerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
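+ # The base transport only defines the interface; the concrete gRPC, + # gRPC-asyncio, and REST transports are expected to override all of these.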
+ methods = ( + 'create_session', + 'batch_create_sessions', + 'get_session', + 'list_sessions', + 'delete_session', + 'execute_sql', + 'execute_streaming_sql', + 'execute_batch_dml', + 'read', + 'streaming_read', + 'begin_transaction', + 'commit', + 'rollback', + 'partition_query', + 'partition_read', + 'batch_write', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_spanner_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SpannerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + quota_project_id="octopus", + ) + + +def test_spanner_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SpannerTransport() + adc.assert_called_once() + + +def test_spanner_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SpannerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpannerGrpcTransport, + transports.SpannerGrpcAsyncIOTransport, + ], +) +def test_spanner_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
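+ # google.auth.default is patched so the test controls the ADC lookup and can + # assert on the scopes and quota project passed through.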
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.data',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpannerGrpcTransport, + transports.SpannerGrpcAsyncIOTransport, + transports.SpannerRestTransport, + ], +) +def test_spanner_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SpannerGrpcTransport, grpc_helpers), + (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_spanner_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + scopes=["1", "2"], + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) +def test_spanner_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
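+ # grpc.ssl_channel_credentials is patched to confirm that the cert/key pair + # produced by the callback is what reaches channel construction.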
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_spanner_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.SpannerRestTransport( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_spanner_host_no_port(transport_name): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://spanner.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_spanner_host_with_port(transport_name): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'spanner.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://spanner.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_spanner_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SpannerClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SpannerClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_session._session + session2 = client2.transport.create_session._session + assert session1 != session2 + session1 = client1.transport.batch_create_sessions._session + session2 = client2.transport.batch_create_sessions._session + assert session1 != session2 + session1 = client1.transport.get_session._session + session2 = client2.transport.get_session._session + assert session1 != session2 + session1 = client1.transport.list_sessions._session + session2 = client2.transport.list_sessions._session + assert session1 != session2 + session1 = client1.transport.delete_session._session + session2 = client2.transport.delete_session._session + assert session1 != session2 + session1 = client1.transport.execute_sql._session + session2 = client2.transport.execute_sql._session + assert session1 != session2 + session1 = client1.transport.execute_streaming_sql._session + session2 = client2.transport.execute_streaming_sql._session + assert session1 != session2 + session1 = client1.transport.execute_batch_dml._session + session2 = client2.transport.execute_batch_dml._session + assert session1 != session2 + session1 = client1.transport.read._session + session2 = client2.transport.read._session + assert session1 != session2 + session1 =
client1.transport.streaming_read._session + session2 = client2.transport.streaming_read._session + assert session1 != session2 + session1 = client1.transport.begin_transaction._session + session2 = client2.transport.begin_transaction._session + assert session1 != session2 + session1 = client1.transport.commit._session + session2 = client2.transport.commit._session + assert session1 != session2 + session1 = client1.transport.rollback._session + session2 = client2.transport.rollback._session + assert session1 != session2 + session1 = client1.transport.partition_query._session + session2 = client2.transport.partition_query._session + assert session1 != session2 + session1 = client1.transport.partition_read._session + session2 = client2.transport.partition_read._session + assert session1 != session2 + session1 = client1.transport.batch_write._session + session2 = client2.transport.batch_write._session + assert session1 != session2 + + +def test_spanner_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SpannerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_spanner_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SpannerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) +def test_spanner_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) +def test_spanner_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_database_path(): + project = "squid" + instance = "clam" + database = "whelk" + expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + actual = SpannerClient.database_path(project, instance, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "octopus", + "instance": "oyster", + "database": "nudibranch", + } + path = SpannerClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_database_path(path) + assert expected == actual + +def test_session_path(): + project = "cuttlefish" + instance = "mussel" + database = "winkle" + session = "nautilus" + expected = "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) + actual = SpannerClient.session_path(project, instance, database, session) + assert expected == actual + + +def test_parse_session_path(): + expected = { + "project": "scallop", + "instance": "abalone", + "database": "squid", + "session": "clam", + } + path = SpannerClient.session_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_session_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SpannerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SpannerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = SpannerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SpannerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
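+ # Each parse_* helper inverts its *_path builder with a regex match on the + # resource name, so round-tripping must reproduce the original fields.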
+ actual = SpannerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SpannerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SpannerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = SpannerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SpannerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SpannerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SpannerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: + transport_class = SpannerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
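+ # Exiting the client's context manager must close the transport exactly once.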
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SpannerClient, transports.SpannerGrpcTransport), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/spanner_admin_database/v1/.coveragerc b/owl-bot-staging/spanner_admin_database/v1/.coveragerc new file mode 100644 index 0000000000..6085d54da4 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/spanner_admin_database/__init__.py + google/cloud/spanner_admin_database/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/spanner_admin_database/v1/.flake8 b/owl-bot-staging/spanner_admin_database/v1/.flake8 new file mode 100644 index 0000000000..90316de214 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/.flake8 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 +exclude = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint + **/gapic/** + **/services/** + **/types/** + # Exclude Protobuf gencode + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/spanner_admin_database/v1/LICENSE b/owl-bot-staging/spanner_admin_database/v1/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in new file mode 100644 index 0000000000..dae249ec89 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/owl-bot-staging/spanner_admin_database/v1/README.rst b/owl-bot-staging/spanner_admin_database/v1/README.rst new file mode 100644 index 0000000000..5cfc4ab969 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/README.rst @@ -0,0 +1,143 @@ +Python Client for Google Cloud Spanner Admin Database API +========================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Spanner Admin Database API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+ + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured or the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers: + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard :code:`logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers: + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the :code:`google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code-based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.)
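+
+Putting it together
+~~~~~~~~~~~~~~~~~~~
+
+As a minimal, illustrative sketch of the code-based mechanism combined with the
+propagation note above (the module name :code:`library_v1` is a placeholder,
+not a real package):
+
+.. code-block:: python
+
+    import logging
+
+    # Handle DEBUG-and-higher events from one Google module.
+    module_logger = logging.getLogger("google.cloud.library_v1")
+    module_logger.addHandler(logging.StreamHandler())
+    module_logger.setLevel(logging.DEBUG)
+
+    # Events under the "google" logger do not reach the root logger by
+    # default; opt in explicitly if your application aggregates logs there.
+    logging.getLogger("google").propagate = True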
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css new file mode 100644 index 0000000000..b0a295464b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html new file mode 100644 index 0000000000..95e9c77fcf --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} + <div class="document"> + {{ sidebar() }} + {%- block document %} + <div class="documentwrapper"> + {%- if render_sidebar %} + <div class="bodywrapper"> + {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + <div class="related top">&nbsp;{{- rellink_markup () }}</div> + {%- endif %} + {% endblock %} + + <div class="body" role="main"> + <div id="python2-eol"> + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. + </div> + {% block body %} {% endblock %} + </div> + + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + <div class="related bottom">&nbsp;{{- rellink_markup () }}</div> + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} + </div> + {%- endif %} + </div> + {%- endblock %} + <div class="sidebar-right-well"></div> + </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py new file mode 100644 index 0000000000..4b16cc5e97 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner-admin-database documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner-admin-database" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-database", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-admin-database-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-spanner-admin-database.tex", + u"google-cloud-spanner-admin-database Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-spanner-admin-database", + "google-cloud-spanner-admin-database Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-spanner-admin-database", + "google-cloud-spanner-admin-database Documentation", + author, + "google-cloud-spanner-admin-database", + "google-cloud-spanner-admin-database Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst new file mode 100644 index 0000000000..6f79e3f3d4 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst @@ -0,0 +1,10 @@ +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + spanner_admin_database_v1/services_ + spanner_admin_database_v1/types_ diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst new file mode 100644 index 0000000000..536d17b2ea --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`.
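+
+ A minimal sketch of that pattern, assuming a :class:`multiprocessing.pool.Pool`
+ worker that builds its own client after the fork (the project and instance
+ names are placeholders):
+
+ .. code-block:: python
+
+     import multiprocessing
+
+     from google.cloud import spanner_admin_database_v1
+
+     def list_database_names(instance_name):
+         # Create the client inside the worker process, i.e. after the fork,
+         # so the underlying gRPC channel is never shared across processes.
+         client = spanner_admin_database_v1.DatabaseAdminClient()
+         return [db.name for db in client.list_databases(parent=instance_name)]
+
+     if __name__ == "__main__":
+         instances = [
+             "projects/my-project/instances/instance-a",
+             "projects/my-project/instances/instance-b",
+         ]
+         with multiprocessing.Pool(processes=2) as pool:
+             print(pool.map(list_database_names, instances))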
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst new file mode 100644 index 0000000000..bd6aab00e4 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst @@ -0,0 +1,10 @@ +DatabaseAdmin +------------------------------- + +.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst new file mode 100644 index 0000000000..55e57d8dc0 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner Admin Database v1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + database_admin diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst new file mode 100644 index 0000000000..fe6c27778b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Spanner Admin Database v1 API +==================================================== + +.. automodule:: google.cloud.spanner_admin_database_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py new file mode 100644 index 0000000000..aa445b5622 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_admin_database import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.spanner_admin_database_v1.services.database_admin.client import DatabaseAdminClient +from google.cloud.spanner_admin_database_v1.services.database_admin.async_client import DatabaseAdminAsyncClient + +from google.cloud.spanner_admin_database_v1.types.backup import Backup +from google.cloud.spanner_admin_database_v1.types.backup import BackupInfo +from google.cloud.spanner_admin_database_v1.types.backup import BackupInstancePartition +from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupMetadata +from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupMetadata +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import DeleteBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import FullBackupSpec +from google.cloud.spanner_admin_database_v1.types.backup import GetBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import IncrementalBackupSpec +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsRequest +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsResponse +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsRequest +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsResponse +from google.cloud.spanner_admin_database_v1.types.backup import UpdateBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupSchedule +from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupScheduleSpec +from google.cloud.spanner_admin_database_v1.types.backup_schedule import CreateBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import CrontabSpec +from google.cloud.spanner_admin_database_v1.types.backup_schedule import DeleteBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import GetBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesResponse +from google.cloud.spanner_admin_database_v1.types.backup_schedule import UpdateBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.common import EncryptionConfig +from google.cloud.spanner_admin_database_v1.types.common import EncryptionInfo +from google.cloud.spanner_admin_database_v1.types.common import OperationProgress +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseRequest +from 
google.cloud.spanner_admin_database_v1.types.spanner_database_admin import Database +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DatabaseRole +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DdlStatementActionInfo +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DropDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import OptimizeRestoredDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreInfo +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import SplitPoints +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreSourceType + +__all__ = ('DatabaseAdminClient', + 'DatabaseAdminAsyncClient', + 'Backup', + 'BackupInfo', + 'BackupInstancePartition', + 'CopyBackupEncryptionConfig', + 'CopyBackupMetadata', + 'CopyBackupRequest', + 'CreateBackupEncryptionConfig', + 'CreateBackupMetadata', + 'CreateBackupRequest', + 'DeleteBackupRequest', + 'FullBackupSpec', + 'GetBackupRequest', + 'IncrementalBackupSpec', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'UpdateBackupRequest', + 'BackupSchedule', + 'BackupScheduleSpec', + 'CreateBackupScheduleRequest', + 'CrontabSpec', + 'DeleteBackupScheduleRequest', + 'GetBackupScheduleRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'UpdateBackupScheduleRequest', + 'EncryptionConfig', + 'EncryptionInfo', + 'OperationProgress', + 
'DatabaseDialect', + 'AddSplitPointsRequest', + 'AddSplitPointsResponse', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'Database', + 'DatabaseRole', + 'DdlStatementActionInfo', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'GetDatabaseRequest', + 'InternalUpdateGraphOperationRequest', + 'InternalUpdateGraphOperationResponse', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'ListDatabaseRolesRequest', + 'ListDatabaseRolesResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'OptimizeRestoredDatabaseMetadata', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'RestoreDatabaseRequest', + 'RestoreInfo', + 'SplitPoints', + 'UpdateDatabaseDdlMetadata', + 'UpdateDatabaseDdlRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseRequest', + 'RestoreSourceType', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed new file mode 100644 index 0000000000..29f334aad6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py new file mode 100644 index 0000000000..6605eff74d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_admin_database_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.database_admin import DatabaseAdminClient +from .services.database_admin import DatabaseAdminAsyncClient + +from .types.backup import Backup +from .types.backup import BackupInfo +from .types.backup import BackupInstancePartition +from .types.backup import CopyBackupEncryptionConfig +from .types.backup import CopyBackupMetadata +from .types.backup import CopyBackupRequest +from .types.backup import CreateBackupEncryptionConfig +from .types.backup import CreateBackupMetadata +from .types.backup import CreateBackupRequest +from .types.backup import DeleteBackupRequest +from .types.backup import FullBackupSpec +from .types.backup import GetBackupRequest +from .types.backup import IncrementalBackupSpec +from .types.backup import ListBackupOperationsRequest +from .types.backup import ListBackupOperationsResponse +from .types.backup import ListBackupsRequest +from .types.backup import ListBackupsResponse +from .types.backup import UpdateBackupRequest +from .types.backup_schedule import BackupSchedule +from .types.backup_schedule import BackupScheduleSpec +from .types.backup_schedule import CreateBackupScheduleRequest +from .types.backup_schedule import CrontabSpec +from .types.backup_schedule import DeleteBackupScheduleRequest +from .types.backup_schedule import GetBackupScheduleRequest +from .types.backup_schedule import ListBackupSchedulesRequest +from .types.backup_schedule import ListBackupSchedulesResponse +from .types.backup_schedule import UpdateBackupScheduleRequest +from .types.common import EncryptionConfig +from .types.common import EncryptionInfo +from .types.common import OperationProgress +from .types.common import DatabaseDialect +from .types.spanner_database_admin import AddSplitPointsRequest +from .types.spanner_database_admin import AddSplitPointsResponse +from .types.spanner_database_admin import CreateDatabaseMetadata +from .types.spanner_database_admin import CreateDatabaseRequest +from .types.spanner_database_admin import Database +from .types.spanner_database_admin import DatabaseRole +from .types.spanner_database_admin import DdlStatementActionInfo +from .types.spanner_database_admin import DropDatabaseRequest +from .types.spanner_database_admin import GetDatabaseDdlRequest +from .types.spanner_database_admin import GetDatabaseDdlResponse +from .types.spanner_database_admin import GetDatabaseRequest +from .types.spanner_database_admin import InternalUpdateGraphOperationRequest +from .types.spanner_database_admin import InternalUpdateGraphOperationResponse +from .types.spanner_database_admin import ListDatabaseOperationsRequest +from .types.spanner_database_admin import ListDatabaseOperationsResponse +from .types.spanner_database_admin import ListDatabaseRolesRequest +from .types.spanner_database_admin import ListDatabaseRolesResponse +from .types.spanner_database_admin import ListDatabasesRequest +from .types.spanner_database_admin import ListDatabasesResponse +from .types.spanner_database_admin import OptimizeRestoredDatabaseMetadata +from .types.spanner_database_admin import RestoreDatabaseEncryptionConfig +from .types.spanner_database_admin import RestoreDatabaseMetadata +from .types.spanner_database_admin import RestoreDatabaseRequest +from .types.spanner_database_admin import RestoreInfo +from .types.spanner_database_admin import SplitPoints +from .types.spanner_database_admin import UpdateDatabaseDdlMetadata +from 
.types.spanner_database_admin import UpdateDatabaseDdlRequest +from .types.spanner_database_admin import UpdateDatabaseMetadata +from .types.spanner_database_admin import UpdateDatabaseRequest +from .types.spanner_database_admin import RestoreSourceType + +__all__ = ( + 'DatabaseAdminAsyncClient', +'AddSplitPointsRequest', +'AddSplitPointsResponse', +'Backup', +'BackupInfo', +'BackupInstancePartition', +'BackupSchedule', +'BackupScheduleSpec', +'CopyBackupEncryptionConfig', +'CopyBackupMetadata', +'CopyBackupRequest', +'CreateBackupEncryptionConfig', +'CreateBackupMetadata', +'CreateBackupRequest', +'CreateBackupScheduleRequest', +'CreateDatabaseMetadata', +'CreateDatabaseRequest', +'CrontabSpec', +'Database', +'DatabaseAdminClient', +'DatabaseDialect', +'DatabaseRole', +'DdlStatementActionInfo', +'DeleteBackupRequest', +'DeleteBackupScheduleRequest', +'DropDatabaseRequest', +'EncryptionConfig', +'EncryptionInfo', +'FullBackupSpec', +'GetBackupRequest', +'GetBackupScheduleRequest', +'GetDatabaseDdlRequest', +'GetDatabaseDdlResponse', +'GetDatabaseRequest', +'IncrementalBackupSpec', +'InternalUpdateGraphOperationRequest', +'InternalUpdateGraphOperationResponse', +'ListBackupOperationsRequest', +'ListBackupOperationsResponse', +'ListBackupSchedulesRequest', +'ListBackupSchedulesResponse', +'ListBackupsRequest', +'ListBackupsResponse', +'ListDatabaseOperationsRequest', +'ListDatabaseOperationsResponse', +'ListDatabaseRolesRequest', +'ListDatabaseRolesResponse', +'ListDatabasesRequest', +'ListDatabasesResponse', +'OperationProgress', +'OptimizeRestoredDatabaseMetadata', +'RestoreDatabaseEncryptionConfig', +'RestoreDatabaseMetadata', +'RestoreDatabaseRequest', +'RestoreInfo', +'RestoreSourceType', +'SplitPoints', +'UpdateBackupRequest', +'UpdateBackupScheduleRequest', +'UpdateDatabaseDdlMetadata', +'UpdateDatabaseDdlRequest', +'UpdateDatabaseMetadata', +'UpdateDatabaseRequest', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json new file mode 100644 index 0000000000..027a4f612b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json @@ -0,0 +1,433 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.spanner_admin_database_v1", + "protoPackage": "google.spanner.admin.database.v1", + "schema": "1.0", + "services": { + "DatabaseAdmin": { + "clients": { + "grpc": { + "libraryClient": "DatabaseAdminClient", + "rpcs": { + "AddSplitPoints": { + "methods": [ + "add_split_points" + ] + }, + "CopyBackup": { + "methods": [ + "copy_backup" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DropDatabase": { + "methods": [ + "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + 
"InternalUpdateGraphOperation": { + "methods": [ + "internal_update_graph_operation" + ] + }, + "ListBackupOperations": { + "methods": [ + "list_backup_operations" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabaseRoles": { + "methods": [ + "list_database_roles" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DatabaseAdminAsyncClient", + "rpcs": { + "AddSplitPoints": { + "methods": [ + "add_split_points" + ] + }, + "CopyBackup": { + "methods": [ + "copy_backup" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DropDatabase": { + "methods": [ + "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "InternalUpdateGraphOperation": { + "methods": [ + "internal_update_graph_operation" + ] + }, + "ListBackupOperations": { + "methods": [ + "list_backup_operations" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabaseRoles": { + "methods": [ + "list_database_roles" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + }, + "rest": { + "libraryClient": "DatabaseAdminClient", + "rpcs": { + "AddSplitPoints": { + "methods": [ + "add_split_points" + ] + }, + "CopyBackup": { + "methods": [ + "copy_backup" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DropDatabase": { + "methods": [ 
+ "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "InternalUpdateGraphOperation": { + "methods": [ + "internal_update_graph_operation" + ] + }, + "ListBackupOperations": { + "methods": [ + "list_backup_operations" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabaseRoles": { + "methods": [ + "list_database_roles" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed new file mode 100644 index 0000000000..29f334aad6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py new file mode 100644 index 0000000000..cbf94b283c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py new file mode 100644 index 0000000000..85e225bbd8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DatabaseAdminClient +from .async_client import DatabaseAdminAsyncClient + +__all__ = ( + 'DatabaseAdminClient', + 'DatabaseAdminAsyncClient', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py new file mode 100644 index 0000000000..b5e733e3a6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -0,0 +1,3968 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
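The gapic_metadata.json fragment above maps every RPC to the client method that implements it, once per transport ("grpc", "grpc-async", "rest"). A minimal sketch of reading that mapping, assuming the standard GAPIC metadata layout of ``services -> service -> clients -> transport -> rpcs`` and a hypothetical local path:

.. code-block:: python

    import json

    # Path is hypothetical; point it at the generated gapic_metadata.json.
    with open("gapic_metadata.json") as f:
        meta = json.load(f)

    # Resolve the async method name for the ListDatabases RPC.
    clients = meta["services"]["DatabaseAdmin"]["clients"]
    print(clients["grpc-async"]["rpcs"]["ListDatabases"]["methods"])
    # Expected, per the fragment above: ['list_databases']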
+#
+import logging as std_logging
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+import uuid
+
+from google.cloud.spanner_admin_database_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+import google.protobuf
+
+
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.cloud.spanner_admin_database_v1.services.database_admin import pagers
+from google.cloud.spanner_admin_database_v1.types import backup
+from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
+from google.cloud.spanner_admin_database_v1.types import backup_schedule
+from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule
+from google.cloud.spanner_admin_database_v1.types import common
+from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport
+from .client import DatabaseAdminClient
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+class DatabaseAdminAsyncClient:
+    """Cloud Spanner Database Admin API
+
+    The Cloud Spanner Database Admin API can be used to:
+
+    - create, drop, and list databases
+    - update the schema of pre-existing databases
+    - create, delete, copy and list backups for a database
+    - restore a database from an existing backup
+    """
+
+    _client: DatabaseAdminClient
+
+    # Copy defaults from the synchronous client for use here.
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
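The deprecated ``DEFAULT_ENDPOINT`` is a fixed hostname, while ``_DEFAULT_ENDPOINT_TEMPLATE`` substitutes the universe domain at client-construction time. A sketch of that substitution, assuming the template value ``"spanner.{UNIVERSE_DOMAIN}"`` (the constant itself is defined on the synchronous client and is not shown in this diff):

.. code-block:: python

    # Assumed template value; the real constant lives on DatabaseAdminClient.
    _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}"

    # The default universe yields the classic endpoint...
    print(_DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="googleapis.com"))
    # ...while a custom universe swaps in its own domain.
    print(_DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="example-universe.test"))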
+ DEFAULT_ENDPOINT = DatabaseAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DatabaseAdminClient._DEFAULT_UNIVERSE + + backup_path = staticmethod(DatabaseAdminClient.backup_path) + parse_backup_path = staticmethod(DatabaseAdminClient.parse_backup_path) + backup_schedule_path = staticmethod(DatabaseAdminClient.backup_schedule_path) + parse_backup_schedule_path = staticmethod(DatabaseAdminClient.parse_backup_schedule_path) + crypto_key_path = staticmethod(DatabaseAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(DatabaseAdminClient.parse_crypto_key_path) + crypto_key_version_path = staticmethod(DatabaseAdminClient.crypto_key_version_path) + parse_crypto_key_version_path = staticmethod(DatabaseAdminClient.parse_crypto_key_version_path) + database_path = staticmethod(DatabaseAdminClient.database_path) + parse_database_path = staticmethod(DatabaseAdminClient.parse_database_path) + database_role_path = staticmethod(DatabaseAdminClient.database_role_path) + parse_database_role_path = staticmethod(DatabaseAdminClient.parse_database_role_path) + instance_path = staticmethod(DatabaseAdminClient.instance_path) + parse_instance_path = staticmethod(DatabaseAdminClient.parse_instance_path) + instance_partition_path = staticmethod(DatabaseAdminClient.instance_partition_path) + parse_instance_partition_path = staticmethod(DatabaseAdminClient.parse_instance_partition_path) + common_billing_account_path = staticmethod(DatabaseAdminClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DatabaseAdminClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DatabaseAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(DatabaseAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(DatabaseAdminClient.common_organization_path) + parse_common_organization_path = staticmethod(DatabaseAdminClient.parse_common_organization_path) + common_project_path = staticmethod(DatabaseAdminClient.common_project_path) + parse_common_project_path = staticmethod(DatabaseAdminClient.parse_common_project_path) + common_location_path = staticmethod(DatabaseAdminClient.common_location_path) + parse_common_location_path = staticmethod(DatabaseAdminClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. + """ + return DatabaseAdminClient.from_service_account_info.__func__(DatabaseAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. 
+ """ + return DatabaseAdminClient.from_service_account_file.__func__(DatabaseAdminAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DatabaseAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DatabaseAdminTransport: + """Returns the transport used by the client instance. + + Returns: + DatabaseAdminTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = DatabaseAdminClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the database admin async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatabaseAdminTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DatabaseAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner.admin.database_v1.DatabaseAdminAsyncClient`.", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "credentialsType": None, + } + ) + + async def list_databases(self, + request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabasesAsyncPager: + r"""Lists Cloud Spanner databases. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_databases(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]]): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + parent (:class:`str`): + Required. The instance whose databases should be listed. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager: + The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabasesRequest): + request = spanner_database_admin.ListDatabasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
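Because ``list_databases`` returns a pager rather than a bare response, results are consumed with ``async for`` and further pages are fetched lazily, as the pager wrap below shows. A sketch with placeholder project and instance IDs:

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    async def show_databases(client: spanner_admin_database_v1.DatabaseAdminAsyncClient):
        # instance_path formats "projects/{project}/instances/{instance}".
        parent = client.instance_path("my-project", "my-instance")
        pager = await client.list_databases(parent=parent)
        async for database in pager:  # transparently resolves additional pages
            print(database.name)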
+ response = pagers.ListDatabasesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_database(self, + request: Optional[Union[spanner_database_admin.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + create_statement: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cloud Spanner database and starts to prepare it + for serving. The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_create_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateDatabaseRequest( + parent="parent_value", + create_statement="create_statement_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]]): + The request object. The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + parent (:class:`str`): + Required. The name of the instance that will serve the + new database. Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + create_statement (:class:`str`): + Required. A ``CREATE DATABASE`` statement, which + specifies the ID of the new database. The database ID + must conform to the regular expression + ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 + characters in length. If the database ID is a reserved + word or if it contains a hyphen, the database ID must be + enclosed in backticks (:literal:`\``). + + This corresponds to the ``create_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, create_statement] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): + request = spanner_database_admin.CreateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if create_statement is not None: + request.create_statement = create_statement + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.CreateDatabaseMetadata, + ) + + # Done; return the response. + return response + + async def get_database(self, + request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.Database: + r"""Gets the state of a Cloud Spanner database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]]): + The request object. The request for + [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. + name (:class:`str`): + Required. The name of the requested database. Values are + of the form + ``projects//instances//databases/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Database: + A Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.GetDatabaseRequest): + request = spanner_database_admin.GetDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
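Every generated method accepts either a fully populated request object or the flattened convenience fields, never both; mixing them raises ``ValueError``, as the guard in the method body shows. A sketch with a placeholder resource name:

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    async def fetch(client: spanner_admin_database_v1.DatabaseAdminAsyncClient):
        name = "projects/my-project/instances/my-instance/databases/my-db"

        # Flattened form.
        db = await client.get_database(name=name)

        # Equivalent request-object form.
        request = spanner_admin_database_v1.GetDatabaseRequest(name=name)
        db = await client.get_database(request=request)

        # Mixing both forms raises ValueError:
        # await client.get_database(request=request, name=name)
        return db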
+ return response + + async def update_database(self, + request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[spanner_database_admin.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a Cloud Spanner database. The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of updating the database. If the named database + does not exist, returns ``NOT_FOUND``. + + While the operation is pending: + + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field is set to true. + - Cancelling the operation is best-effort. If the cancellation + succeeds, the operation metadata's + [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] + is set, the updates are reverted, and the operation terminates + with a ``CANCELLED`` status. + - New UpdateDatabase requests will return a + ``FAILED_PRECONDITION`` error until the pending operation is + done (returns successfully or with error). + - Reading the database via the API continues to give the + pre-request values. + + Upon completion of the returned operation: + + - The new values are in effect and readable via the API. + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field becomes false. + + The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format + ``projects//instances//databases//operations/`` + and can be used to track the database modification. The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + database = spanner_admin_database_v1.Database() + database.name = "name_value" + + request = spanner_admin_database_v1.UpdateDatabaseRequest( + database=database, + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]]): + The request object. The request for + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + database (:class:`google.cloud.spanner_admin_database_v1.types.Database`): + Required. The database to update. 
The ``name`` field of + the database is of the form + ``projects//instances//databases/``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, only + ``enable_drop_protection`` field can be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest): + request = spanner_database_admin.UpdateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database.name", request.database.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.UpdateDatabaseMetadata, + ) + + # Done; return the response. 
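Since, as the docstring notes, only ``enable_drop_protection`` is currently updatable, the field mask names just that path. A sketch of toggling it and waiting on the long-running operation; the resource name is a placeholder:

.. code-block:: python

    from google.protobuf import field_mask_pb2
    from google.cloud import spanner_admin_database_v1

    async def protect(client: spanner_admin_database_v1.DatabaseAdminAsyncClient):
        database = spanner_admin_database_v1.Database(
            name="projects/my-project/instances/my-instance/databases/my-db",
            enable_drop_protection=True,
        )
        mask = field_mask_pb2.FieldMask(paths=["enable_drop_protection"])
        operation = await client.update_database(database=database, update_mask=mask)
        return await operation.result()  # resolves to the updated Database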
+ return response + + async def update_database_ddl(self, + request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None, + *, + database: Optional[str] = None, + statements: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``/operations/`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database="database_value", + statements=['statements_value1', 'statements_value2'], + ) + + # Make the request + operation = client.update_database_ddl(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]]): + The request object. Enqueues the given DDL statements to be applied, in + order but not necessarily all at once, to the database + schema at some point (or points) in the future. The + server checks that the statements are executable + (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + ``NULL`` value in a column to which ``NOT NULL`` would + be added). If a statement fails, all subsequent + statements in the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + database (:class:`str`): + Required. The database to update. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + statements (:class:`MutableSequence[str]`): + Required. DDL statements to be + applied to the database. + + This corresponds to the ``statements`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, statements] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): + request = spanner_database_admin.UpdateDatabaseDdlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if statements: + request.statements.extend(statements) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, + ) + + # Done; return the response. + return response + + async def drop_database(self, + request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. Note: Cloud Spanner might continue to accept + requests for a few seconds after the database has been deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + await client.drop_database(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]]): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + database (:class:`str`): + Required. The database to be dropped. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.DropDatabaseRequest): + request = spanner_database_admin.DropDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.drop_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_database_ddl(self, + request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + r"""Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. 
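A sketch tying ``update_database_ddl`` (above) to ``get_database_ddl``: enqueue a schema change, wait for the response-less operation, then read the schema back. The database name and DDL statement are illustrative only:

.. code-block:: python

    async def add_singers_table(client, database: str):
        # database is a placeholder "projects/.../databases/..." name.
        operation = await client.update_database_ddl(
            database=database,
            statements=[
                "CREATE TABLE Singers (SingerId INT64) PRIMARY KEY (SingerId)",
            ],
        )
        await operation.result()  # UpdateDatabaseDdl operations carry no response body

        ddl = await client.get_database_ddl(database=database)
        for statement in ddl.statements:
            print(statement)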
This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = await client.get_database_ddl(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]]): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + database (:class:`str`): + Required. The database whose schema we wish to get. + Values are of the form + ``projects//instances//databases/`` + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: + The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): + request = spanner_database_admin.GetDatabaseDdlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on a database or backup resource. + Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+                **JSON example:**
+
+                .. code-block:: json
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": [
+                            "user:eve@example.com"
+                          ],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                .. code-block:: yaml
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [resource]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - The request isn't a proto-plus wrapped type,
+        #   so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+        elif not request:
+            request = iam_policy_pb2.SetIamPolicyRequest(resource=resource)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(self,
+            request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
+            *,
+            resource: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> policy_pb2.Policy:
+        r"""Gets the access control policy for a database or backup
+        resource. Returns an empty policy if a database or backup exists
+        but does not have a policy set.
+
+        Authorization requires ``spanner.databases.getIamPolicy``
+        permission on
+        [resource][google.iam.v1.GetIamPolicyRequest.resource]. For
+        backups, authorization requires ``spanner.backups.getIamPolicy``
+        permission on
+        [resource][google.iam.v1.GetIamPolicyRequest.resource].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            async def sample_get_iam_policy():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.GetIamPolicyRequest(
+                    resource="resource_value",
+                )
+
+                # Make the request
+                response = await client.get_iam_policy(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]):
+                The request object. Request message for ``GetIamPolicy`` method.
+            resource (:class:`str`):
+                REQUIRED: The resource for which the
+                policy is being requested. See the
+                operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.iam.v1.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which specifies access
+                controls for Google Cloud resources.
+
+                A Policy is a collection of bindings. A binding binds
+                one or more members, or principals, to a single role.
+                Principals can be user accounts, service accounts,
+                Google groups, and domains (such as G Suite). A role
+                is a named list of permissions; each role can be an
+                IAM predefined role or a user-created custom role.
+
+                For some types of Google Cloud resources, a binding
+                can also specify a condition, which is a logical
+                expression that allows access to a resource only if
+                the expression evaluates to true. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both. To learn which resources support
+                conditions in their IAM policies, see the [IAM
+                documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+                **JSON example:**
+
+                .. code-block:: json
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": [
+                            "user:eve@example.com"
+                          ],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+                          }
+                        }
+                      ],
+                      "etag": "BwWWja0YfJA=",
+                      "version": 3
+                    }
+
+                **YAML example:**
+
+                .. code-block:: yaml
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                    etag: BwWWja0YfJA=
+                    version: 3
+
+                For a description of IAM and its features, see the
+                [IAM
+                documentation](https://cloud.google.com/iam/docs/).
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [resource]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - The request isn't a proto-plus wrapped type,
+        #   so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+        elif not request:
+            request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def test_iam_permissions(self,
+            request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
+            *,
+            resource: Optional[str] = None,
+            permissions: Optional[MutableSequence[str]] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Returns permissions that the caller has on the specified
+        database or backup resource.
+
+        Attempting this RPC on a non-existent Cloud Spanner database
+        will result in a NOT_FOUND error if the user has
+        ``spanner.databases.list`` permission on the containing Cloud
+        Spanner instance. Otherwise returns an empty set of permissions.
+        Calling this method on a backup that does not exist will result
+        in a NOT_FOUND error if the user has ``spanner.backups.list``
+        permission on the containing instance.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            async def sample_test_iam_permissions():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.TestIamPermissionsRequest(
+                    resource="resource_value",
+                    permissions=['permissions_value1', 'permissions_value2'],
+                )
+
+                # Make the request
+                response = await client.test_iam_permissions(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]):
+                The request object. Request message for ``TestIamPermissions`` method.
+            resource (:class:`str`):
+                REQUIRED: The resource for which the
+                policy detail is being requested. See
+                the operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            permissions (:class:`MutableSequence[str]`):
+                The set of permissions to check for the ``resource``.
+                Permissions with wildcards (such as '*' or 'storage.*')
+                are not allowed. For more information see `IAM
+                Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+                This corresponds to the ``permissions`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for TestIamPermissions method.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [resource, permissions]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - The request isn't a proto-plus wrapped type,
+        #   so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+        elif not request:
+            request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
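+        # Note: to_grpc_metadata() emits the standard ``x-goog-request-params``
+        # header with a URL-encoded value, e.g. (illustrative):
+        #   ("x-goog-request-params", "resource=projects%2Fmy-project%2F...")
+        # This lets the backend route the call without parsing the request body.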
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup(self, + request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[gsad_backup.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts creating a new Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + creation and delete the backup. There can be only one pending + backup creation per database. Backup creation of different + databases can run concurrently. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_create_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]]): + The request object. The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + parent (:class:`str`): + Required. The name of the instance in which the backup + will be created. This must be the same instance that + contains the database the backup will be created from. + The backup will be stored in the location(s) specified + in the instance configuration of this instance. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): + Required. The backup to create. + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. The id of the backup to be created. 
The + ``backup_id`` appended to ``parent`` forms the full + backup name of the form + ``projects//instances//backups/``. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup, backup_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup.CreateBackupRequest): + request = gsad_backup.CreateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gsad_backup.Backup, + metadata_type=gsad_backup.CreateBackupMetadata, + ) + + # Done; return the response. + return response + + async def copy_backup(self, + request: Optional[Union[backup.CopyBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_id: Optional[str] = None, + source_backup: Optional[str] = None, + expire_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts copying a Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track copying of the backup. 
The operation is + associated with the destination backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + copying and delete the destination backup. Concurrent CopyBackup + requests can run on the same source backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_copy_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + # Make the request + operation = client.copy_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]]): + The request object. The request for + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. + parent (:class:`str`): + Required. The name of the destination instance that will + contain the backup copy. Values are of the form: + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. The id of the backup copy. The ``backup_id`` + appended to ``parent`` forms the full backup_uri of the + form + ``projects//instances//backups/``. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source_backup (:class:`str`): + Required. The source backup to be copied. The source + backup needs to be in READY state for it to be copied. + Once CopyBackup is in progress, the source backup cannot + be deleted or cleaned up on expiration until CopyBackup + is finished. Values are of the form: + ``projects//instances//backups/``. + + This corresponds to the ``source_backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + expire_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. The expiration time of the backup in + microsecond granularity. The expiration time must be at + least 6 hours and at most 366 days from the + ``create_time`` of the source backup. Once the + ``expire_time`` has passed, the backup is eligible to be + automatically deleted by Cloud Spanner to free the + resources used by the backup. + + This corresponds to the ``expire_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_id, source_backup, expire_time] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.CopyBackupRequest): + request = backup.CopyBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_id is not None: + request.backup_id = backup_id + if source_backup is not None: + request.source_backup = source_backup + if expire_time is not None: + request.expire_time = expire_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.copy_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backup.Backup, + metadata_type=backup.CopyBackupMetadata, + ) + + # Done; return the response. + return response + + async def get_backup(self, + request: Optional[Union[backup.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backup.Backup: + r"""Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]]): + The request object. The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + name (:class:`str`): + Required. Name of the backup. Values are of the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.GetBackupRequest): + request = backup.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
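+        # The returned backup.Backup message reflects server-side status at read
+        # time; for example, ``state`` distinguishes a CREATING backup from a
+        # READY one.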
+ return response + + async def update_backup(self, + request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[gsad_backup.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup.Backup: + r"""Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = await client.update_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]]): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only + supported for the following fields: + + - ``backup.expire_time``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be + updated. This mask is relative to the Backup resource, + not to the request message. The field mask must always + be specified; this prevents any future fields from being + erased accidentally by clients that do not know about + them. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
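+        # The two calling conventions are mutually exclusive (illustrative):
+        #   await client.update_backup(backup=b, update_mask=mask)        # flattened args
+        #   await client.update_backup(request=UpdateBackupRequest(...))  # request object
+        # Supplying both forms at once triggers the ValueError below.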
+ flattened_params = [backup, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup.UpdateBackupRequest): + request = gsad_backup.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup.name", request.backup.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup(self, + request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_delete_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]]): + The request object. The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + name (:class:`str`): + Required. Name of the backup to delete. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.DeleteBackupRequest): + request = backup.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_backups(self, + request: Optional[Union[backup.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_backups(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]]): + The request object. The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + parent (:class:`str`): + Required. The instance to list backups from. Values are + of the form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager: + The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.ListBackupsRequest): + request = backup.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def restore_database(self, + request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database_id: Optional[str] = None, + backup: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new database by restoring from a completed backup. The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. 
Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_restore_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.RestoreDatabaseRequest( + backup="backup_value", + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.restore_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]]): + The request object. The request for + [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + parent (:class:`str`): + Required. The name of the instance in which to create + the restored database. This instance must be in the same + project and have the same instance configuration as the + instance containing the source backup. Values are of the + form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (:class:`str`): + Required. The id of the database to create and restore + to. This database must not already exist. The + ``database_id`` appended to ``parent`` forms the full + database name of the form + ``projects//instances//databases/``. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`str`): + Name of the backup from which to restore. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
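+        # A plain dict is also accepted for ``request``; the proto-plus constructor
+        # below coerces e.g. {"parent": ..., "database_id": ..., "backup": ...}
+        # into a RestoreDatabaseRequest (illustrative).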
+ flattened_params = [parent, database_id, backup] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest): + request = spanner_database_admin.RestoreDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database_id is not None: + request.database_id = database_id + if backup is not None: + request.backup = backup + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.restore_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.RestoreDatabaseMetadata, + ) + + # Done; return the response. + return response + + async def list_database_operations(self, + request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabaseOperationsAsyncPager: + r"""Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_database_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]]): + The request object. The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + parent (:class:`str`): + Required. The instance of the database operations. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager: + The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): + request = spanner_database_admin.ListDatabaseOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
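+        # rpc() yields a single ListDatabaseOperationsResponse page; the pager
+        # wrapper added below fetches subsequent pages on demand, so callers can
+        # simply iterate (illustrative):
+        #   async for op in await client.list_database_operations(parent=...): ...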
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatabaseOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_operations(self, + request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupOperationsAsyncPager: + r"""Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_backup_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]]): + The request object. The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + parent (:class:`str`): + Required. The instance of the backup operations. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager: + The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.ListBackupOperationsRequest): + request = backup.ListBackupOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_database_roles(self, + request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabaseRolesAsyncPager: + r"""Lists Cloud Spanner database roles. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_database_roles(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseRolesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_roles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]]): + The request object. The request for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + parent (:class:`str`): + Required. The database whose roles should be listed. 
+ Values are of the form + ``projects//instances//databases/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager: + The response for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest): + request = spanner_database_admin.ListDatabaseRolesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_roles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatabaseRolesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def add_split_points(self, + request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None, + *, + database: Optional[str] = None, + split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.AddSplitPointsResponse: + r"""Adds split points to specified tables, indexes of a + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_add_split_points(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.AddSplitPointsRequest( + database="database_value", + ) + + # Make the request + response = await client.add_split_points(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]]): + The request object. The request for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + database (:class:`str`): + Required. The database on whose tables/indexes split + points are to be added. Values are of the form + ``projects//instances//databases/``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + split_points (:class:`MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]`): + Required. The split points to add. + This corresponds to the ``split_points`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: + The response for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, split_points] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): + request = spanner_database_admin.AddSplitPointsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if split_points: + request.split_points.extend(split_points) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.add_split_points] + + # Certain fields should be provided within the metadata header; + # add these here. 
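+ # The routing header is transmitted as the `x-goog-request-params`
+ # request metadata entry, which the backend uses to route the call
+ # to the correct database.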
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + backup_schedule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Creates a new backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = await client.create_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]]): + The request object. The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + parent (:class:`str`): + Required. The name of the database + that this backup schedule applies to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): + Required. The backup schedule to + create. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_schedule_id (:class:`str`): + Required. The Id to use for the backup schedule. The + ``backup_schedule_id`` appended to ``parent`` forms the + full backup schedule name of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``backup_schedule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
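+
+ For example, the flattened fields can be passed directly instead of
+ building a request object (a sketch continuing the sample above; the
+ resource names are placeholders):
+
+ .. code-block:: python
+
+ schedule = await client.create_backup_schedule(
+ parent="projects/my-project/instances/my-instance/databases/my-database",
+ backup_schedule=spanner_admin_database_v1.BackupSchedule(),
+ backup_schedule_id="my-schedule",
+ )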
+ + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_schedule, backup_schedule_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest): + request = gsad_backup_schedule.CreateBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if backup_schedule_id is not None: + request.backup_schedule_id = backup_schedule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup_schedule(self, + request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backup_schedule.BackupSchedule: + r"""Gets backup schedule for the input schedule name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]]): + The request object. The request for + [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. + name (:class:`str`): + Required. The name of the schedule to retrieve. 
Values + are of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.GetBackupScheduleRequest): + request = backup_schedule.GetBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, + *, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]]): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated + as specified by ``update_mask`` are required. Other + fields are ignored. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which + fields in the BackupSchedule resource + should be updated. This mask is relative + to the BackupSchedule resource, not to + the request message. The field mask must + always be specified; this prevents any + future fields from being erased + accidentally. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup_schedule, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): + request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
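+ # The wrapped method carries the default retry and timeout policies
+ # configured for this RPC; the `retry` and `timeout` arguments
+ # override those defaults when the caller sets them.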
+ rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup_schedule.name", request.backup_schedule.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_schedule(self, + request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup_schedule(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]]): + The request object. The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + name (:class:`str`): + Required. The name of the schedule to delete. Values are + of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
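+ # A plain dict is accepted here as well; the proto-plus constructor
+ # below coerces a mapping into a DeleteBackupScheduleRequest message.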
+ if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest): + request = backup_schedule.DeleteBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_backup_schedules(self, + request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupSchedulesAsyncPager: + r"""Lists all the backup schedules for the database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_backup_schedules(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_schedules(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]]): + The request object. The request for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + parent (:class:`str`): + Required. Database is the parent + resource whose backup schedules should + be listed. Values are of the form + projects//instances//databases/ + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager: + The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Iterating over this object will yield results and + resolve additional pages automatically. 
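+
+ For example, all schedules can be collected into a list (a sketch;
+ the parent value is a placeholder). Note that the paged method is a
+ coroutine and must be awaited to obtain the pager:
+
+ .. code-block:: python
+
+ pager = await client.list_backup_schedules(
+ parent="projects/my-project/instances/my-instance/databases/my-database",
+ )
+ schedules = [schedule async for schedule in pager]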
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.ListBackupSchedulesRequest): + request = backup_schedule.ListBackupSchedulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_schedules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupSchedulesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def internal_update_graph_operation(self, + request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, + *, + database: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: + r"""This is an internal API called by Spanner Graph jobs. + You should never need to call this API directly. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = await client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]]): + The request object. 
Internal request proto, do not use + directly. + database (:class:`str`): + Internal field, do not use directly. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (:class:`str`): + Internal field, do not use directly. + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: + Internal response proto, do not use + directly. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, operation_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): + request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.internal_update_graph_operation] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
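+
+ For example (a sketch; the operations collection name below is a
+ placeholder):
+
+ .. code-block:: python
+
+ from google.longrunning import operations_pb2
+
+ response = await client.list_operations(
+ operations_pb2.ListOperationsRequest(
+ name="projects/my-project/instances/my-instance/databases/my-database/operations",
+ )
+ )
+ for operation in response.operations:
+ print(operation.name)
+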
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
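+ # CancelOperation returns google.protobuf.Empty, so no response is
+ # surfaced to the caller.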
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "DatabaseAdminAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "DatabaseAdminAsyncClient", +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py new file mode 100644 index 0000000000..38e3050e48 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -0,0 +1,4387 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import uuid +import warnings + +from google.cloud.spanner_admin_database_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_database_v1.services.database_admin import pagers +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import common +from 
google.cloud.spanner_admin_database_v1.types import spanner_database_admin
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import DatabaseAdminGrpcTransport
+from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport
+from .transports.rest import DatabaseAdminRestTransport
+
+
+class DatabaseAdminClientMeta(type):
+ """Metaclass for the DatabaseAdmin client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+ _transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]]
+ _transport_registry["grpc"] = DatabaseAdminGrpcTransport
+ _transport_registry["grpc_asyncio"] = DatabaseAdminGrpcAsyncIOTransport
+ _transport_registry["rest"] = DatabaseAdminRestTransport
+
+ def get_transport_class(cls,
+ label: Optional[str] = None,
+ ) -> Type[DatabaseAdminTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class DatabaseAdminClient(metaclass=DatabaseAdminClientMeta):
+ """Cloud Spanner Database Admin API
+
+ The Cloud Spanner Database Admin API can be used to:
+
+ - create, drop, and list databases
+ - update the schema of pre-existing databases
+ - create, delete, copy and list backups for a database
+ - restore a database from an existing backup
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ # The named groups below are required for the unpacking into
+ # (name, mtls, sandbox, googledomain) that follows.
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
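+ # The template below is expanded with the configured universe domain,
+ # which yields "spanner.googleapis.com" for the default universe.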
+ DEFAULT_ENDPOINT = "spanner.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}"
+ _DEFAULT_UNIVERSE = "googleapis.com"
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ DatabaseAdminClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ DatabaseAdminClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(
+ filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> DatabaseAdminTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ DatabaseAdminTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def backup_path(project: str,instance: str,backup: str,) -> str:
+ """Returns a fully-qualified backup string."""
+ return "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, )
+
+ @staticmethod
+ def parse_backup_path(path: str) -> Dict[str,str]:
+ """Parses a backup path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/backups/(?P<backup>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def backup_schedule_path(project: str,instance: str,database: str,schedule: str,) -> str:
+ """Returns a fully-qualified backup_schedule string."""
+ return "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, )
+
+ @staticmethod
+ def parse_backup_schedule_path(path: str) -> Dict[str,str]:
+ """Parses a backup_schedule path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)/backupSchedules/(?P<schedule>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str:
+ """Returns a fully-qualified crypto_key string."""
+ return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, )
+
+ @staticmethod
+ def parse_crypto_key_path(path: str) -> Dict[str,str]:
+ """Parses a crypto_key path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: str,) -> str:
+ """Returns a fully-qualified crypto_key_version string."""
+ return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, )
+
+ @staticmethod
+ def parse_crypto_key_version_path(path: str) -> Dict[str,str]:
+ """Parses a crypto_key_version path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)/cryptoKeyVersions/(?P<crypto_key_version>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def database_path(project: str,instance: str,database: str,) -> str:
+ """Returns a fully-qualified database string."""
+ return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, )
+
+ @staticmethod
+ def parse_database_path(path: str) -> Dict[str,str]:
+ """Parses a database path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def database_role_path(project: str,instance: str,database: str,role: str,) -> str:
+ """Returns a fully-qualified database_role string."""
+ return "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, )
+
+ @staticmethod
+ def parse_database_role_path(path: str) -> Dict[str,str]:
+ """Parses a database_role path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)/databaseRoles/(?P<role>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def instance_path(project: str,instance: str,) -> str:
+ """Returns a fully-qualified instance string."""
+ return "projects/{project}/instances/{instance}".format(project=project, instance=instance, )
+
+ @staticmethod
+ def parse_instance_path(path: str) -> Dict[str,str]:
+ """Parses an instance path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str:
+ """Returns a fully-qualified instance_partition string."""
+ return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, )
+
+ @staticmethod
+ def parse_instance_partition_path(path: str) -> Dict[str,str]:
+ """Parses an instance_partition path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/instancePartitions/(?P<instance_partition>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str, ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str, ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder, )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str,str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str, ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization, )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str,str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str, ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(project=project, )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str,str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str, ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str,str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+
+ warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. 
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DatabaseAdminClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the database admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatabaseAdminTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DatabaseAdminClient._read_environment_variables() + self._client_cert_source = DatabaseAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = DatabaseAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DatabaseAdminTransport) + if transport_provided: + # transport is a DatabaseAdminTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DatabaseAdminTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DatabaseAdminClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DatabaseAdminTransport], Callable[..., DatabaseAdminTransport]] = ( + DatabaseAdminClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DatabaseAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner.admin.database_v1.DatabaseAdminClient`.", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "credentialsType": None, + } + ) + + def list_databases(self, + request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabasesPager: + r"""Lists Cloud Spanner databases. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_list_databases(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. 
+ parent (str): + Required. The instance whose databases should be listed. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager: + The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabasesRequest): + request = spanner_database_admin.ListDatabasesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabasesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_database(self, + request: Optional[Union[spanner_database_admin.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + create_statement: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new Cloud Spanner database and starts to prepare it + for serving. The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. 
The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_create_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateDatabaseRequest( + parent="parent_value", + create_statement="create_statement_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): + The request object. The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + parent (str): + Required. The name of the instance that will serve the + new database. Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + create_statement (str): + Required. A ``CREATE DATABASE`` statement, which + specifies the ID of the new database. The database ID + must conform to the regular expression + ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 + characters in length. If the database ID is a reserved + word or if it contains a hyphen, the database ID must be + enclosed in backticks (:literal:`\``). + + This corresponds to the ``create_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
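+        # NOTE: The check below enforces mutual exclusion between `request`
+        # and the flattened arguments (`parent`, `create_statement`); callers
+        # use one calling convention or the other, never both.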
+ flattened_params = [parent, create_statement] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): + request = spanner_database_admin.CreateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if create_statement is not None: + request.create_statement = create_statement + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.CreateDatabaseMetadata, + ) + + # Done; return the response. + return response + + def get_database(self, + request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.Database: + r"""Gets the state of a Cloud Spanner database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_get_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): + The request object. The request for + [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. + name (str): + Required. The name of the requested database. Values are + of the form + ``projects//instances//databases/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Database: + A Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.GetDatabaseRequest): + request = spanner_database_admin.GetDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_database(self, + request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[spanner_database_admin.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a Cloud Spanner database. The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of updating the database. If the named database + does not exist, returns ``NOT_FOUND``. + + While the operation is pending: + + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field is set to true. + - Cancelling the operation is best-effort. If the cancellation + succeeds, the operation metadata's + [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] + is set, the updates are reverted, and the operation terminates + with a ``CANCELLED`` status. + - New UpdateDatabase requests will return a + ``FAILED_PRECONDITION`` error until the pending operation is + done (returns successfully or with error). + - Reading the database via the API continues to give the + pre-request values. + + Upon completion of the returned operation: + + - The new values are in effect and readable via the API. + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field becomes false. 
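+
+        For example, a caller that only toggles drop protection might issue
+        (an illustrative sketch; the resource name below is a placeholder):
+
+        .. code-block:: python
+
+            from google.protobuf import field_mask_pb2
+
+            database = spanner_admin_database_v1.Database(
+                name="projects/my-project/instances/my-instance/databases/my-db",
+                enable_drop_protection=True,
+            )
+            operation = client.update_database(
+                database=database,
+                update_mask=field_mask_pb2.FieldMask(paths=["enable_drop_protection"]),
+            )
+            operation.result()  # Blocks until the update completes.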
+ + The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format + ``projects//instances//databases//operations/`` + and can be used to track the database modification. The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + database = spanner_admin_database_v1.Database() + database.name = "name_value" + + request = spanner_admin_database_v1.UpdateDatabaseRequest( + database=database, + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]): + The request object. The request for + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + database (google.cloud.spanner_admin_database_v1.types.Database): + Required. The database to update. The ``name`` field of + the database is of the form + ``projects//instances//databases/``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, only + ``enable_drop_protection`` field can be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
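+        # NOTE: `request` may arrive as a plain dict or a proto-plus message;
+        # it is normalized to an UpdateDatabaseRequest below before the
+        # flattened `database` and `update_mask` fields are applied on top.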
+ flattened_params = [database, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest): + request = spanner_database_admin.UpdateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database.name", request.database.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.UpdateDatabaseMetadata, + ) + + # Done; return the response. + return response + + def update_database_ddl(self, + request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None, + *, + database: Optional[str] = None, + statements: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``/operations/`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database="database_value", + statements=['statements_value1', 'statements_value2'], + ) + + # Make the request + operation = client.update_database_ddl(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): + The request object. Enqueues the given DDL statements to be applied, in + order but not necessarily all at once, to the database + schema at some point (or points) in the future. The + server checks that the statements are executable + (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + ``NULL`` value in a column to which ``NOT NULL`` would + be added). If a statement fails, all subsequent + statements in the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + database (str): + Required. The database to update. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + statements (MutableSequence[str]): + Required. DDL statements to be + applied to the database. + + This corresponds to the ``statements`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
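+        # NOTE: Per the request semantics documented above, the DDL statements
+        # are applied in order but not atomically; if one statement fails, the
+        # remaining statements in the batch are cancelled.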
+ flattened_params = [database, statements] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): + request = spanner_database_admin.UpdateDatabaseDdlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if statements is not None: + request.statements = statements + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, + ) + + # Done; return the response. + return response + + def drop_database(self, + request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. Note: Cloud Spanner might continue to accept + requests for a few seconds after the database has been deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + client.drop_database(request=request) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + database (str): + Required. The database to be dropped. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.DropDatabaseRequest): + request = spanner_database_admin.DropDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.drop_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_database_ddl(self, + request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + r"""Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = client.get_database_ddl(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + database (str): + Required. The database whose schema we wish to get. 
+ Values are of the form + ``projects//instances//databases/`` + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: + The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): + request = spanner_database_admin.GetDatabaseDdlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on a database or backup resource. + Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that the caller has on the specified + database or backup resource. + + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. 
+        Calling this method on a backup that does not exist will result
+        in a NOT_FOUND error if the user has ``spanner.backups.list``
+        permission on the containing instance.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            def sample_test_iam_permissions():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.TestIamPermissionsRequest(
+                    resource="resource_value",
+                    permissions=['permissions_value1', 'permissions_value2'],
+                )
+
+                # Make the request
+                response = client.test_iam_permissions(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
+                The request object. Request message for ``TestIamPermissions`` method.
+            resource (str):
+                REQUIRED: The resource for which the
+                policy detail is being requested. See the
+                operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            permissions (MutableSequence[str]):
+                The set of permissions to check for the ``resource``.
+                Permissions with wildcards (such as '*' or 'storage.*')
+                are not allowed. For more information see `IAM Overview
+                <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+                This corresponds to the ``permissions`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for TestIamPermissions method.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [resource, permissions]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        if isinstance(request, dict):
+            # - The request isn't a proto-plus wrapped type,
+            #   so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.TestIamPermissionsRequest()
+            if resource is not None:
+                request.resource = resource
+            if permissions:
+                request.permissions.extend(permissions)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
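+        # (When ``retry`` and ``timeout`` are left as gapic_v1.method.DEFAULT,
+        # the wrapped method substitutes the per-RPC defaults that the
+        # transport registered at construction time.)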
+        rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_backup(self,
+            request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            backup: Optional[gsad_backup.Backup] = None,
+            backup_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Starts creating a new Cloud Spanner Backup. The returned backup
+        [long-running operation][google.longrunning.Operation] will have
+        a name of the format
+        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+        and can be used to track creation of the backup. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Backup][google.spanner.admin.database.v1.Backup], if
+        successful. Cancelling the returned operation will stop the
+        creation and delete the backup. There can be only one pending
+        backup creation per database. Backup creation of different
+        databases can run concurrently.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_create_backup():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.CreateBackupRequest(
+                    parent="parent_value",
+                    backup_id="backup_id_value",
+                )
+
+                # Make the request
+                operation = client.create_backup(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]):
+                The request object. The request for
+                [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup].
+            parent (str):
+                Required. The name of the instance in which the backup
+                will be created. This must be the same instance that
+                contains the database the backup will be created from.
+                The backup will be stored in the location(s) specified
+                in the instance configuration of this instance. Values
+                are of the form
+                ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            backup (google.cloud.spanner_admin_database_v1.types.Backup):
+                Required. The backup to create.
+ This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (str): + Required. The id of the backup to be created. The + ``backup_id`` appended to ``parent`` forms the full + backup name of the form + ``projects//instances//backups/``. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup, backup_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup.CreateBackupRequest): + request = gsad_backup.CreateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gsad_backup.Backup, + metadata_type=gsad_backup.CreateBackupMetadata, + ) + + # Done; return the response. + return response + + def copy_backup(self, + request: Optional[Union[backup.CopyBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_id: Optional[str] = None, + source_backup: Optional[str] = None, + expire_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Starts copying a Cloud Spanner Backup. 
The returned backup
+        [long-running operation][google.longrunning.Operation] will have
+        a name of the format
+        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+        and can be used to track copying of the backup. The operation is
+        associated with the destination backup. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Backup][google.spanner.admin.database.v1.Backup], if
+        successful. Cancelling the returned operation will stop the
+        copying and delete the destination backup. Concurrent CopyBackup
+        requests can run on the same source backup.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_copy_backup():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.CopyBackupRequest(
+                    parent="parent_value",
+                    backup_id="backup_id_value",
+                    source_backup="source_backup_value",
+                )
+
+                # Make the request
+                operation = client.copy_backup(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]):
+                The request object. The request for
+                [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup].
+            parent (str):
+                Required. The name of the destination instance that will
+                contain the backup copy. Values are of the form:
+                ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            backup_id (str):
+                Required. The id of the backup copy. The ``backup_id``
+                appended to ``parent`` forms the full backup_uri of the
+                form
+                ``projects/<project>/instances/<instance>/backups/<backup_id>``.
+
+                This corresponds to the ``backup_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            source_backup (str):
+                Required. The source backup to be copied. The source
+                backup needs to be in READY state for it to be copied.
+                Once CopyBackup is in progress, the source backup cannot
+                be deleted or cleaned up on expiration until CopyBackup
+                is finished. Values are of the form:
+                ``projects/<project>/instances/<instance>/backups/<backup>``.
+
+                This corresponds to the ``source_backup`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            expire_time (google.protobuf.timestamp_pb2.Timestamp):
+                Required. The expiration time of the backup in
+                microsecond granularity. The expiration time must be at
+                least 6 hours and at most 366 days from the
+                ``create_time`` of the source backup. Once the
+                ``expire_time`` has passed, the backup is eligible to be
+                automatically deleted by Cloud Spanner to free the
+                resources used by the backup.
+
+                This corresponds to the ``expire_time`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_id, source_backup, expire_time] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.CopyBackupRequest): + request = backup.CopyBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_id is not None: + request.backup_id = backup_id + if source_backup is not None: + request.source_backup = source_backup + if expire_time is not None: + request.expire_time = expire_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.copy_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backup.Backup, + metadata_type=backup.CopyBackupMetadata, + ) + + # Done; return the response. + return response + + def get_backup(self, + request: Optional[Union[backup.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backup.Backup: + r"""Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_get_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]): + The request object. The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + name (str): + Required. Name of the backup. Values are of the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.GetBackupRequest): + request = backup.GetBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup(self, + request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[gsad_backup.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup.Backup: + r"""Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = client.update_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only + supported for the following fields: + + - ``backup.expire_time``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be + updated. This mask is relative to the Backup resource, + not to the request message. The field mask must always + be specified; this prevents any future fields from being + erased accidentally by clients that do not know about + them. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup.UpdateBackupRequest): + request = gsad_backup.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
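+        # Illustrative note (not generated code): since ``expire_time`` is the
+        # only field for which update is supported, a typical caller passes a
+        # Backup carrying ``name`` and the new ``expire_time`` together with
+        #   update_mask=field_mask_pb2.FieldMask(paths=["expire_time"])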
+ if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup.name", request.backup.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup(self, + request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_delete_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + client.delete_backup(request=request) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): + The request object. The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + name (str): + Required. Name of the backup to delete. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
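+        # (Coercion note: building the proto-plus request from a dict or
+        # another message copies the data, so the caller's input object is
+        # never mutated.)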
+ if not isinstance(request, backup.DeleteBackupRequest): + request = backup.DeleteBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_backups(self, + request: Optional[Union[backup.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupsPager: + r"""Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_list_backups(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): + The request object. The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + parent (str): + Required. The instance to list backups from. Values are + of the form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager: + The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.ListBackupsRequest): + request = backup.ListBackupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def restore_database(self, + request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database_id: Optional[str] = None, + backup: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a new database by restoring from a completed backup. The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_restore_database():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.RestoreDatabaseRequest(
+                    backup="backup_value",
+                    parent="parent_value",
+                    database_id="database_id_value",
+                )
+
+                # Make the request
+                operation = client.restore_database(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]):
+                The request object. The request for
+                [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].
+            parent (str):
+                Required. The name of the instance in which to create
+                the restored database. This instance must be in the same
+                project and have the same instance configuration as the
+                instance containing the source backup. Values are of the
+                form ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            database_id (str):
+                Required. The id of the database to create and restore
+                to. This database must not already exist. The
+                ``database_id`` appended to ``parent`` forms the full
+                database name of the form
+                ``projects/<project>/instances/<instance>/databases/<database_id>``.
+
+                This corresponds to the ``database_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            backup (str):
+                Name of the backup from which to restore. Values are of
+                the form
+                ``projects/<project>/instances/<instance>/backups/<backup>``.
+
+                This corresponds to the ``backup`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.spanner_admin_database_v1.types.Database`
+                A Cloud Spanner database.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, database_id, backup]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest):
+            request = spanner_database_admin.RestoreDatabaseRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
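+        # (Note: ``backup`` here is a resource *name* identifying the restore
+        # source, unlike ``create_backup``, where ``backup`` is a Backup
+        # message.)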
+ if parent is not None: + request.parent = parent + if database_id is not None: + request.database_id = database_id + if backup is not None: + request.backup = backup + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.restore_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.RestoreDatabaseMetadata, + ) + + # Done; return the response. + return response + + def list_database_operations(self, + request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabaseOperationsPager: + r"""Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_list_database_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): + The request object. The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + parent (str): + Required. The instance of the database operations. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager: + The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): + request = spanner_database_admin.ListDatabaseOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_database_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabaseOperationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_operations(self, + request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupOperationsPager: + r"""Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_list_backup_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): + The request object. The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + parent (str): + Required. The instance of the backup operations. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager: + The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.ListBackupOperationsRequest): + request = backup.ListBackupOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_backup_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
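+        # (The pager re-invokes ``rpc`` with each next_page_token, so the
+        # retry, timeout, and metadata captured below also govern the
+        # follow-up page requests.)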
+ response = pagers.ListBackupOperationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_database_roles(self, + request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabaseRolesPager: + r"""Lists Cloud Spanner database roles. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_list_database_roles(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseRolesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_roles(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]): + The request object. The request for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + parent (str): + Required. The database whose roles should be listed. + Values are of the form + ``projects//instances//databases/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager: + The response for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
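+        # (Illustrative only: ``parent`` for this RPC is a database resource,
+        # e.g. "projects/<project>/instances/<instance>/databases/<database>",
+        # not an instance as in the backup RPCs above.)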
+ if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest): + request = spanner_database_admin.ListDatabaseRolesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_database_roles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatabaseRolesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def add_split_points(self, + request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None, + *, + database: Optional[str] = None, + split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.AddSplitPointsResponse: + r"""Adds split points to specified tables, indexes of a + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_add_split_points(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.AddSplitPointsRequest( + database="database_value", + ) + + # Make the request + response = client.add_split_points(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]): + The request object. The request for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + database (str): + Required. The database on whose tables/indexes split + points are to be added. Values are of the form + ``projects//instances//databases/``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]): + Required. The split points to add. + This corresponds to the ``split_points`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: + The response for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, split_points] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): + request = spanner_database_admin.AddSplitPointsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if split_points is not None: + request.split_points = split_points + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_split_points] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + backup_schedule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Creates a new backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_create_backup_schedule():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.CreateBackupScheduleRequest(
+ parent="parent_value",
+ backup_schedule_id="backup_schedule_id_value",
+ )
+
+ # Make the request
+ response = client.create_backup_schedule(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]):
+ The request object. The request for
+ [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule].
+ parent (str):
+ Required. The name of the database
+ that this backup schedule applies to.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule):
+ Required. The backup schedule to
+ create.
+
+ This corresponds to the ``backup_schedule`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_schedule_id (str):
+ Required. The ID to use for the backup schedule. The
+ ``backup_schedule_id`` appended to ``parent`` forms the
+ full backup schedule name of the form
+ ``projects//instances//databases//backupSchedules/``.
+
+ This corresponds to the ``backup_schedule_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.types.BackupSchedule:
+ BackupSchedule expresses the
+ automated backup creation specification
+ for a Spanner database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, backup_schedule, backup_schedule_id]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest):
+ request = gsad_backup_schedule.CreateBackupScheduleRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
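+ # Only explicitly provided keyword arguments are copied onto the
+ # request; arguments left as None never overwrite fields that are
+ # already present on a coerced request.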
+ if parent is not None:
+ request.parent = parent
+ if backup_schedule is not None:
+ request.backup_schedule = backup_schedule
+ if backup_schedule_id is not None:
+ request.backup_schedule_id = backup_schedule_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_backup_schedule(self,
+ request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> backup_schedule.BackupSchedule:
+ r"""Gets the backup schedule for the given schedule name.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_get_backup_schedule():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.GetBackupScheduleRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_backup_schedule(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]):
+ The request object. The request for
+ [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule].
+ name (str):
+ Required. The name of the schedule to retrieve. Values
+ are of the form
+ ``projects//instances//databases//backupSchedules/``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.types.BackupSchedule:
+ BackupSchedule expresses the
+ automated backup creation specification
+ for a Spanner database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
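+ # For example (illustrative), get_backup_schedule(request=req, name="n")
+ # mixes the request-object and flattened-field calling styles and is
+ # rejected below with a ValueError.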
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.GetBackupScheduleRequest): + request = backup_schedule.GetBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, + *, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated + as specified by ``update_mask`` are required. Other + fields are ignored. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which + fields in the BackupSchedule resource + should be updated. This mask is relative + to the BackupSchedule resource, not to + the request message. 
The field mask must
+ always be specified; this prevents any
+ future fields from being erased
+ accidentally.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.types.BackupSchedule:
+ BackupSchedule expresses the
+ automated backup creation specification
+ for a Spanner database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [backup_schedule, update_mask]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest):
+ request = gsad_backup_schedule.UpdateBackupScheduleRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if backup_schedule is not None:
+ request.backup_schedule = backup_schedule
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("backup_schedule.name", request.backup_schedule.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_backup_schedule(self,
+ request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> None:
+ r"""Deletes a backup schedule.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + client.delete_backup_schedule(request=request) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]): + The request object. The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + name (str): + Required. The name of the schedule to delete. Values are + of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest): + request = backup_schedule.DeleteBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_backup_schedules(self, + request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupSchedulesPager: + r"""Lists all the backup schedules for the database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_list_backup_schedules():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.ListBackupSchedulesRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_backup_schedules(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]):
+ The request object. The request for
+ [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+ parent (str):
+ Required. The database whose backup
+ schedules should be listed. Values are
+ of the form
+ ``projects//instances//databases/``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager:
+ The response for
+ [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, backup_schedule.ListBackupSchedulesRequest):
+ request = backup_schedule.ListBackupSchedulesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
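+ # When retry and timeout are left as gapic_v1.method.DEFAULT, the wrapped
+ # method substitutes the defaults configured for this RPC on the transport.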
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupSchedulesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def internal_update_graph_operation(self, + request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, + *, + database: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: + r"""This is an internal API called by Spanner Graph jobs. + You should never need to call this API directly. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]): + The request object. Internal request proto, do not use + directly. + database (str): + Internal field, do not use directly. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (str): + Internal field, do not use directly. + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: + Internal response proto, do not use + directly. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
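+ # Note: unlike the public RPCs above, this internal method attaches no
+ # explicit routing-header metadata before the request is sent.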
+ flattened_params = [database, operation_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): + request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.internal_update_graph_operation] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DatabaseAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
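+ # The operations mixins resolve their callables from the same
+ # per-transport _wrapped_methods table used by the service RPCs above.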
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "DatabaseAdminClient", +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py new file mode 100644 index 0000000000..5a5b92e7d1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py @@ -0,0 +1,864 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.longrunning import operations_pb2 # type: ignore + + +class ListDatabasesPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabasesResponse], + request: spanner_database_admin.ListDatabasesRequest, + response: spanner_database_admin.ListDatabasesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
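+
+ Example (an illustrative sketch; assumes application default
+ credentials and placeholder resource names)::
+
+ from google.cloud import spanner_admin_database_v1
+
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+ for database in client.list_databases(parent="parent_value"):
+ print(database.name)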
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabasesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_database_admin.Database]: + for page in self.pages: + yield from page.databases + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabasesAsyncPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabasesResponse]], + request: spanner_database_admin.ListDatabasesRequest, + response: spanner_database_admin.ListDatabasesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabasesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: + async def async_generator(): + async for page in self.pages: + for response in page.databases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., backup.ListBackupsResponse], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup.ListBackupsResponse]], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[backup.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseOperationsPager: + """A pager for iterating through ``list_database_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabaseOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabaseOperationsResponse], + request: spanner_database_admin.ListDatabaseOperationsRequest, + response: spanner_database_admin.ListDatabaseOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseOperationsAsyncPager: + """A pager for iterating through ``list_database_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabaseOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]], + request: spanner_database_admin.ListDatabaseOperationsRequest, + response: spanner_database_admin.ListDatabaseOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupOperationsPager: + """A pager for iterating through ``list_backup_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., backup.ListBackupOperationsResponse], + request: backup.ListBackupOperationsRequest, + response: backup.ListBackupOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupOperationsAsyncPager: + """A pager for iterating through ``list_backup_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup.ListBackupOperationsResponse]], + request: backup.ListBackupOperationsRequest, + response: backup.ListBackupOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseRolesPager: + """A pager for iterating through ``list_database_roles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``database_roles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabaseRoles`` requests and continue to iterate + through the ``database_roles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabaseRolesResponse], + request: spanner_database_admin.ListDatabaseRolesRequest, + response: spanner_database_admin.ListDatabaseRolesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseRolesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabaseRolesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_database_admin.DatabaseRole]: + for page in self.pages: + yield from page.database_roles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseRolesAsyncPager: + """A pager for iterating through ``list_database_roles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``database_roles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabaseRoles`` requests and continue to iterate + through the ``database_roles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseRolesResponse]], + request: spanner_database_admin.ListDatabaseRolesRequest, + response: spanner_database_admin.ListDatabaseRolesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseRolesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseRolesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_database_admin.DatabaseRole]: + async def async_generator(): + async for page in self.pages: + for response in page.database_roles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupSchedulesPager: + """A pager for iterating through ``list_backup_schedules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_schedules`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupSchedules`` requests and continue to iterate + through the ``backup_schedules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., backup_schedule.ListBackupSchedulesResponse], + request: backup_schedule.ListBackupSchedulesRequest, + response: backup_schedule.ListBackupSchedulesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup_schedule.ListBackupSchedulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup_schedule.ListBackupSchedulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup_schedule.BackupSchedule]: + for page in self.pages: + yield from page.backup_schedules + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupSchedulesAsyncPager: + """A pager for iterating through ``list_backup_schedules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_schedules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupSchedules`` requests and continue to iterate + through the ``backup_schedules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup_schedule.ListBackupSchedulesResponse]], + request: backup_schedule.ListBackupSchedulesRequest, + response: backup_schedule.ListBackupSchedulesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup_schedule.ListBackupSchedulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup_schedule.ListBackupSchedulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[backup_schedule.BackupSchedule]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_schedules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst new file mode 100644 index 0000000000..f70c023a98 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DatabaseAdminTransport` is the ABC for all transports. +- public child `DatabaseAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DatabaseAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDatabaseAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DatabaseAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py new file mode 100644 index 0000000000..975834b54d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DatabaseAdminTransport +from .grpc import DatabaseAdminGrpcTransport +from .grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport +from .rest import DatabaseAdminRestTransport +from .rest import DatabaseAdminRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] +_transport_registry['grpc'] = DatabaseAdminGrpcTransport +_transport_registry['grpc_asyncio'] = DatabaseAdminGrpcAsyncIOTransport +_transport_registry['rest'] = DatabaseAdminRestTransport + +__all__ = ( + 'DatabaseAdminTransport', + 'DatabaseAdminGrpcTransport', + 'DatabaseAdminGrpcAsyncIOTransport', + 'DatabaseAdminRestTransport', + 'DatabaseAdminRestInterceptor', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py new file mode 100644 index 0000000000..7d801501d3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -0,0 +1,795 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.spanner_admin_database_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DatabaseAdminTransport(abc.ABC): + """Abstract transport class for DatabaseAdmin.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. 
+ self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_databases: gapic_v1.method.wrap_method( + self.list_databases, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_database: gapic_v1.method.wrap_method( + self.create_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database: gapic_v1.method.wrap_method( + self.get_database, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database: gapic_v1.method.wrap_method( + self.update_database, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database_ddl: gapic_v1.method.wrap_method( + self.update_database_ddl, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.drop_database: gapic_v1.method.wrap_method( + self.drop_database, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database_ddl: gapic_v1.method.wrap_method( + self.get_database_ddl, + 
default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method.wrap_method( + self.create_backup, + default_timeout=3600.0, + client_info=client_info, + ), + self.copy_backup: gapic_v1.method.wrap_method( + self.copy_backup, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.restore_database: gapic_v1.method.wrap_method( + self.restore_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_operations: gapic_v1.method.wrap_method( + self.list_database_operations, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_operations: gapic_v1.method.wrap_method( + self.list_backup_operations, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_roles: gapic_v1.method.wrap_method( + self.list_database_roles, + 
default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.add_split_points: gapic_v1.method.wrap_method( + self.add_split_points, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_backup_schedule: gapic_v1.method.wrap_method( + self.create_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup_schedule: gapic_v1.method.wrap_method( + self.get_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup_schedule: gapic_v1.method.wrap_method( + self.update_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup_schedule: gapic_v1.method.wrap_method( + self.delete_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_schedules: gapic_v1.method.wrap_method( + self.list_backup_schedules, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.internal_update_graph_operation: gapic_v1.method.wrap_method( + self.internal_update_graph_operation, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
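+
+        Example:
+            A minimal sketch (assumes this transport belongs to a single
+            ``DatabaseAdminClient`` named ``client`` and is not shared with
+            any other client)::
+
+                client.transport.close()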
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + Union[ + spanner_database_admin.ListDatabasesResponse, + Awaitable[spanner_database_admin.ListDatabasesResponse] + ]]: + raise NotImplementedError() + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + Union[ + spanner_database_admin.Database, + Awaitable[spanner_database_admin.Database] + ]]: + raise NotImplementedError() + + @property + def update_database(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + Union[ + spanner_database_admin.GetDatabaseDdlResponse, + Awaitable[spanner_database_admin.GetDatabaseDdlResponse] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def copy_backup(self) -> Callable[ + [backup.CopyBackupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + Union[ + backup.Backup, + Awaitable[backup.Backup] + ]]: + raise NotImplementedError() + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + Union[ + gsad_backup.Backup, + Awaitable[gsad_backup.Backup] + ]]: + raise NotImplementedError() + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + Union[ + backup.ListBackupsResponse, + Awaitable[backup.ListBackupsResponse] + ]]: + raise NotImplementedError() + + @property + def 
restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + Union[ + spanner_database_admin.ListDatabaseOperationsResponse, + Awaitable[spanner_database_admin.ListDatabaseOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + Union[ + backup.ListBackupOperationsResponse, + Awaitable[backup.ListBackupOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + Union[ + spanner_database_admin.ListDatabaseRolesResponse, + Awaitable[spanner_database_admin.ListDatabaseRolesResponse] + ]]: + raise NotImplementedError() + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + Union[ + spanner_database_admin.AddSplitPointsResponse, + Awaitable[spanner_database_admin.AddSplitPointsResponse] + ]]: + raise NotImplementedError() + + @property + def create_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + Union[ + gsad_backup_schedule.BackupSchedule, + Awaitable[gsad_backup_schedule.BackupSchedule] + ]]: + raise NotImplementedError() + + @property + def get_backup_schedule(self) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + Union[ + backup_schedule.BackupSchedule, + Awaitable[backup_schedule.BackupSchedule] + ]]: + raise NotImplementedError() + + @property + def update_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + Union[ + gsad_backup_schedule.BackupSchedule, + Awaitable[gsad_backup_schedule.BackupSchedule] + ]]: + raise NotImplementedError() + + @property + def delete_backup_schedule(self) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_backup_schedules(self) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + Union[ + backup_schedule.ListBackupSchedulesResponse, + Awaitable[backup_schedule.ListBackupSchedulesResponse] + ]]: + raise NotImplementedError() + + @property + def internal_update_graph_operation(self) -> Callable[ + [spanner_database_admin.InternalUpdateGraphOperationRequest], + Union[ + spanner_database_admin.InternalUpdateGraphOperationResponse, + Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + +__all__ = ( + 'DatabaseAdminTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py new file mode 100644 index 0000000000..8f548acc7c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -0,0 +1,1285 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + 
            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata (key/value pairs, e.g. a
+            # `grpc.aio` ``Metadata`` object) to a dict of str values.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class DatabaseAdminGrpcTransport(DatabaseAdminTransport):
+    """gRPC backend transport for DatabaseAdmin.
+
+    Cloud Spanner Database Admin API
+
+    The Cloud Spanner Database Admin API can be used to:
+
+    - create, drop, and list databases
+    - update the schema of pre-existing databases
+    - create, delete, copy and list backups for a database
+    - restore a database from an existing backup
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
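+
+        Example:
+            An illustrative sketch of supplying a pre-built channel at
+            construction time (the host and scopes shown are the defaults
+            defined on the transport classes above)::
+
+                channel = DatabaseAdminGrpcTransport.create_channel(
+                    'spanner.googleapis.com',
+                    scopes=DatabaseAdminTransport.AUTH_SCOPES,
+                )
+                transport = DatabaseAdminGrpcTransport(channel=channel)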
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + spanner_database_admin.ListDatabasesResponse]: + r"""Return a callable for the list databases method over gRPC. + + Lists Cloud Spanner databases. + + Returns: + Callable[[~.ListDatabasesRequest], + ~.ListDatabasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_databases' not in self._stubs: + self._stubs['list_databases'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', + request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, + ) + return self._stubs['list_databases'] + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Cloud Spanner database and starts to prepare it + for serving. The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_database' not in self._stubs: + self._stubs['create_database'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', + request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_database'] + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + spanner_database_admin.Database]: + r"""Return a callable for the get database method over gRPC. + + Gets the state of a Cloud Spanner database. + + Returns: + Callable[[~.GetDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_database' not in self._stubs:
+            self._stubs['get_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase',
+                request_serializer=spanner_database_admin.GetDatabaseRequest.serialize,
+                response_deserializer=spanner_database_admin.Database.deserialize,
+            )
+        return self._stubs['get_database']
+
+    @property
+    def update_database(self) -> Callable[
+            [spanner_database_admin.UpdateDatabaseRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update database method over gRPC.
+
+        Updates a Cloud Spanner database. The returned [long-running
+        operation][google.longrunning.Operation] can be used to track
+        the progress of updating the database. If the named database
+        does not exist, returns ``NOT_FOUND``.
+
+        While the operation is pending:
+
+        - The database's
+          [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+          field is set to true.
+        - Cancelling the operation is best-effort. If the cancellation
+          succeeds, the operation metadata's
+          [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time]
+          is set, the updates are reverted, and the operation terminates
+          with a ``CANCELLED`` status.
+        - New UpdateDatabase requests will return a
+          ``FAILED_PRECONDITION`` error until the pending operation is
+          done (returns successfully or with error).
+        - Reading the database via the API continues to give the
+          pre-request values.
+
+        Upon completion of the returned operation:
+
+        - The new values are in effect and readable via the API.
+        - The database's
+          [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+          field becomes false.
+
+        The returned [long-running
+        operation][google.longrunning.Operation] will have a name of the
+        format
+        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``
+        and can be used to track the database modification. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Database][google.spanner.admin.database.v1.Database], if
+        successful.
+
+        Returns:
+            Callable[[~.UpdateDatabaseRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_database' not in self._stubs:
+            self._stubs['update_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase',
+                request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_database']
+
+    @property
+    def update_database_ddl(self) -> Callable[
+            [spanner_database_admin.UpdateDatabaseDdlRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update database ddl method over gRPC.
+
+        Updates the schema of a Cloud Spanner database by
+        creating/altering/dropping tables, columns, indexes, etc. The
+        returned [long-running operation][google.longrunning.Operation]
+        will have a name of the format
+        ``<database_name>/operations/<operation_id>`` and can be used to
+        track execution of the schema change(s).
+        The [metadata][google.longrunning.Operation.metadata] field type is
+        [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata].
+        The operation has no response.
+
+        Returns:
+            Callable[[~.UpdateDatabaseDdlRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_database_ddl' not in self._stubs:
+            self._stubs['update_database_ddl'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl',
+                request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_database_ddl']
+
+    @property
+    def drop_database(self) -> Callable[
+            [spanner_database_admin.DropDatabaseRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the drop database method over gRPC.
+
+        Drops (aka deletes) a Cloud Spanner database. Completed backups
+        for the database will be retained according to their
+        ``expire_time``. Note: Cloud Spanner might continue to accept
+        requests for a few seconds after the database has been deleted.
+
+        Returns:
+            Callable[[~.DropDatabaseRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'drop_database' not in self._stubs:
+            self._stubs['drop_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase',
+                request_serializer=spanner_database_admin.DropDatabaseRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['drop_database']
+
+    @property
+    def get_database_ddl(self) -> Callable[
+            [spanner_database_admin.GetDatabaseDdlRequest],
+            spanner_database_admin.GetDatabaseDdlResponse]:
+        r"""Return a callable for the get database ddl method over gRPC.
+
+        Returns the schema of a Cloud Spanner database as a list of
+        formatted DDL statements. This method does not show pending
+        schema updates; those may be queried using the
+        [Operations][google.longrunning.Operations] API.
+
+        Returns:
+            Callable[[~.GetDatabaseDdlRequest],
+                    ~.GetDatabaseDdlResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_database_ddl' not in self._stubs:
+            self._stubs['get_database_ddl'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl',
+                request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize,
+                response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize,
+            )
+        return self._stubs['get_database_ddl']
+
+    @property
+    def set_iam_policy(self) -> Callable[
+            [iam_policy_pb2.SetIamPolicyRequest],
+            policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the access control policy on a database or backup resource.
+        Replaces any existing policy.
+
+        Authorization requires ``spanner.databases.setIamPolicy``
+        permission on
+        [resource][google.iam.v1.SetIamPolicyRequest.resource]. For
+        backups, authorization requires ``spanner.backups.setIamPolicy``
+        permission on
+        [resource][google.iam.v1.SetIamPolicyRequest.resource].
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'set_iam_policy' not in self._stubs:
+            self._stubs['set_iam_policy'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy',
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs['set_iam_policy']
+
+    @property
+    def get_iam_policy(self) -> Callable[
+            [iam_policy_pb2.GetIamPolicyRequest],
+            policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the access control policy for a database or backup
+        resource. Returns an empty policy if a database or backup exists
+        but does not have a policy set.
+
+        Authorization requires ``spanner.databases.getIamPolicy``
+        permission on
+        [resource][google.iam.v1.GetIamPolicyRequest.resource]. For
+        backups, authorization requires ``spanner.backups.getIamPolicy``
+        permission on
+        [resource][google.iam.v1.GetIamPolicyRequest.resource].
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_iam_policy' not in self._stubs:
+            self._stubs['get_iam_policy'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy',
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs['get_iam_policy']
+
+    @property
+    def test_iam_permissions(self) -> Callable[
+            [iam_policy_pb2.TestIamPermissionsRequest],
+            iam_policy_pb2.TestIamPermissionsResponse]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Returns permissions that the caller has on the specified
+        database or backup resource.
+
+        Attempting this RPC on a non-existent Cloud Spanner database
+        will result in a NOT_FOUND error if the user has
+        ``spanner.databases.list`` permission on the containing Cloud
+        Spanner instance. Otherwise returns an empty set of permissions.
+        Calling this method on a backup that does not exist will result
+        in a NOT_FOUND error if the user has ``spanner.backups.list``
+        permission on the containing instance.
+
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
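+        # A minimal sketch (illustrative only, not part of the generated
+        # surface) of the memoization idiom every property in this class
+        # uses -- the stub is built once per RPC name and cached on the
+        # instance, so repeated attribute access never rebuilds it:
+        #
+        #     def _cached_stub(self, name, path, serializer, deserializer):
+        #         if name not in self._stubs:
+        #             self._stubs[name] = self._logged_channel.unary_unary(
+        #                 path,
+        #                 request_serializer=serializer,
+        #                 response_deserializer=deserializer,
+        #             )
+        #         return self._stubs[name]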
+        if 'test_iam_permissions' not in self._stubs:
+            self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions',
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs['test_iam_permissions']
+
+    @property
+    def create_backup(self) -> Callable[
+            [gsad_backup.CreateBackupRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the create backup method over gRPC.
+
+        Starts creating a new Cloud Spanner Backup. The returned backup
+        [long-running operation][google.longrunning.Operation] will have
+        a name of the format
+        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+        and can be used to track creation of the backup. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Backup][google.spanner.admin.database.v1.Backup], if
+        successful. Cancelling the returned operation will stop the
+        creation and delete the backup. There can be only one pending
+        backup creation per database. Backup creation of different
+        databases can run concurrently.
+
+        Returns:
+            Callable[[~.CreateBackupRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_backup' not in self._stubs:
+            self._stubs['create_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup',
+                request_serializer=gsad_backup.CreateBackupRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_backup']
+
+    @property
+    def copy_backup(self) -> Callable[
+            [backup.CopyBackupRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the copy backup method over gRPC.
+
+        Starts copying a Cloud Spanner Backup. The returned backup
+        [long-running operation][google.longrunning.Operation] will have
+        a name of the format
+        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+        and can be used to track copying of the backup. The operation is
+        associated with the destination backup. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Backup][google.spanner.admin.database.v1.Backup], if
+        successful. Cancelling the returned operation will stop the
+        copying and delete the destination backup. Concurrent CopyBackup
+        requests can run on the same source backup.
+
+        Returns:
+            Callable[[~.CopyBackupRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
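+        # An illustrative usage sketch (hypothetical resource names;
+        # ``transport`` stands for an instance of this class). The property
+        # below returns a plain callable that sends the raw request proto
+        # and returns a raw ``operations_pb2.Operation``:
+        #
+        #     request = backup.CopyBackupRequest(
+        #         parent="projects/my-project/instances/my-instance",
+        #         backup_id="copied-backup",
+        #         source_backup="projects/my-project/instances/my-instance/backups/source-backup",
+        #     )
+        #     operation = transport.copy_backup(request)
+        #     # Poll the long-running operation through
+        #     # ``transport.operations_client`` until ``operation.done`` is set.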
+        if 'copy_backup' not in self._stubs:
+            self._stubs['copy_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup',
+                request_serializer=backup.CopyBackupRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['copy_backup']
+
+    @property
+    def get_backup(self) -> Callable[
+            [backup.GetBackupRequest],
+            backup.Backup]:
+        r"""Return a callable for the get backup method over gRPC.
+
+        Gets metadata on a pending or completed
+        [Backup][google.spanner.admin.database.v1.Backup].
+
+        Returns:
+            Callable[[~.GetBackupRequest],
+                    ~.Backup]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_backup' not in self._stubs:
+            self._stubs['get_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup',
+                request_serializer=backup.GetBackupRequest.serialize,
+                response_deserializer=backup.Backup.deserialize,
+            )
+        return self._stubs['get_backup']
+
+    @property
+    def update_backup(self) -> Callable[
+            [gsad_backup.UpdateBackupRequest],
+            gsad_backup.Backup]:
+        r"""Return a callable for the update backup method over gRPC.
+
+        Updates a pending or completed
+        [Backup][google.spanner.admin.database.v1.Backup].
+
+        Returns:
+            Callable[[~.UpdateBackupRequest],
+                    ~.Backup]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_backup' not in self._stubs:
+            self._stubs['update_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup',
+                request_serializer=gsad_backup.UpdateBackupRequest.serialize,
+                response_deserializer=gsad_backup.Backup.deserialize,
+            )
+        return self._stubs['update_backup']
+
+    @property
+    def delete_backup(self) -> Callable[
+            [backup.DeleteBackupRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete backup method over gRPC.
+
+        Deletes a pending or completed
+        [Backup][google.spanner.admin.database.v1.Backup].
+
+        Returns:
+            Callable[[~.DeleteBackupRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_backup' not in self._stubs:
+            self._stubs['delete_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup',
+                request_serializer=backup.DeleteBackupRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_backup']
+
+    @property
+    def list_backups(self) -> Callable[
+            [backup.ListBackupsRequest],
+            backup.ListBackupsResponse]:
+        r"""Return a callable for the list backups method over gRPC.
+
+        Lists completed and pending backups. Backups returned are
+        ordered by ``create_time`` in descending order, starting from
+        the most recent ``create_time``.
+
+        Returns:
+            Callable[[~.ListBackupsRequest],
+                    ~.ListBackupsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_backups' not in self._stubs:
+            self._stubs['list_backups'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups',
+                request_serializer=backup.ListBackupsRequest.serialize,
+                response_deserializer=backup.ListBackupsResponse.deserialize,
+            )
+        return self._stubs['list_backups']
+
+    @property
+    def restore_database(self) -> Callable[
+            [spanner_database_admin.RestoreDatabaseRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the restore database method over gRPC.
+
+        Create a new database by restoring from a completed backup. The
+        new database must be in the same project and in an instance with
+        the same instance configuration as the instance containing the
+        backup. The returned database [long-running
+        operation][google.longrunning.Operation] has a name of the
+        format
+        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``,
+        and can be used to track the progress of the operation, and to
+        cancel it. The [metadata][google.longrunning.Operation.metadata]
+        field type is
+        [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata].
+        The [response][google.longrunning.Operation.response] type is
+        [Database][google.spanner.admin.database.v1.Database], if
+        successful. Cancelling the returned operation will stop the
+        restore and delete the database. There can be only one database
+        being restored into an instance at a time. Once the restore
+        operation completes, a new restore operation can be initiated,
+        without waiting for the optimize operation associated with the
+        first restore to complete.
+
+        Returns:
+            Callable[[~.RestoreDatabaseRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'restore_database' not in self._stubs:
+            self._stubs['restore_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase',
+                request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['restore_database']
+
+    @property
+    def list_database_operations(self) -> Callable[
+            [spanner_database_admin.ListDatabaseOperationsRequest],
+            spanner_database_admin.ListDatabaseOperationsResponse]:
+        r"""Return a callable for the list database operations method over gRPC.
+
+        Lists database
+        [longrunning-operations][google.longrunning.Operation]. A
+        database operation has a name of the form
+        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``.
+        The long-running operation
+        [metadata][google.longrunning.Operation.metadata] field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations.
+
+        Returns:
+            Callable[[~.ListDatabaseOperationsRequest],
+                    ~.ListDatabaseOperationsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_database_operations' not in self._stubs: + self._stubs['list_database_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', + request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, + ) + return self._stubs['list_database_operations'] + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + backup.ListBackupOperationsResponse]: + r"""Return a callable for the list backup operations method over gRPC. + + Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + Returns: + Callable[[~.ListBackupOperationsRequest], + ~.ListBackupOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_operations' not in self._stubs: + self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + request_serializer=backup.ListBackupOperationsRequest.serialize, + response_deserializer=backup.ListBackupOperationsResponse.deserialize, + ) + return self._stubs['list_backup_operations'] + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + spanner_database_admin.ListDatabaseRolesResponse]: + r"""Return a callable for the list database roles method over gRPC. + + Lists Cloud Spanner database roles. + + Returns: + Callable[[~.ListDatabaseRolesRequest], + ~.ListDatabaseRolesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_database_roles' not in self._stubs: + self._stubs['list_database_roles'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', + request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, + ) + return self._stubs['list_database_roles'] + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + spanner_database_admin.AddSplitPointsResponse]: + r"""Return a callable for the add split points method over gRPC. 
+
+        Adds split points to specified tables and indexes of a
+        database.
+
+        Returns:
+            Callable[[~.AddSplitPointsRequest],
+                    ~.AddSplitPointsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'add_split_points' not in self._stubs:
+            self._stubs['add_split_points'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints',
+                request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize,
+                response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize,
+            )
+        return self._stubs['add_split_points']
+
+    @property
+    def create_backup_schedule(self) -> Callable[
+            [gsad_backup_schedule.CreateBackupScheduleRequest],
+            gsad_backup_schedule.BackupSchedule]:
+        r"""Return a callable for the create backup schedule method over gRPC.
+
+        Creates a new backup schedule.
+
+        Returns:
+            Callable[[~.CreateBackupScheduleRequest],
+                    ~.BackupSchedule]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_backup_schedule' not in self._stubs:
+            self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule',
+                request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize,
+                response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize,
+            )
+        return self._stubs['create_backup_schedule']
+
+    @property
+    def get_backup_schedule(self) -> Callable[
+            [backup_schedule.GetBackupScheduleRequest],
+            backup_schedule.BackupSchedule]:
+        r"""Return a callable for the get backup schedule method over gRPC.
+
+        Gets backup schedule for the input schedule name.
+
+        Returns:
+            Callable[[~.GetBackupScheduleRequest],
+                    ~.BackupSchedule]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_backup_schedule' not in self._stubs:
+            self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule',
+                request_serializer=backup_schedule.GetBackupScheduleRequest.serialize,
+                response_deserializer=backup_schedule.BackupSchedule.deserialize,
+            )
+        return self._stubs['get_backup_schedule']
+
+    @property
+    def update_backup_schedule(self) -> Callable[
+            [gsad_backup_schedule.UpdateBackupScheduleRequest],
+            gsad_backup_schedule.BackupSchedule]:
+        r"""Return a callable for the update backup schedule method over gRPC.
+
+        Updates a backup schedule.
+
+        Returns:
+            Callable[[~.UpdateBackupScheduleRequest],
+                    ~.BackupSchedule]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
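+        # An illustrative sketch (hypothetical names; the ``spec`` field path
+        # is an assumption, not taken from this file). Partial updates are
+        # expressed with a ``FieldMask``:
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     request = gsad_backup_schedule.UpdateBackupScheduleRequest(
+        #         backup_schedule=gsad_backup_schedule.BackupSchedule(
+        #             name="projects/p/instances/i/databases/d/backupSchedules/s",
+        #         ),
+        #         update_mask=field_mask_pb2.FieldMask(paths=["spec"]),
+        #     )
+        #     schedule = transport.update_backup_schedule(request)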
+        if 'update_backup_schedule' not in self._stubs:
+            self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule',
+                request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize,
+                response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize,
+            )
+        return self._stubs['update_backup_schedule']
+
+    @property
+    def delete_backup_schedule(self) -> Callable[
+            [backup_schedule.DeleteBackupScheduleRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete backup schedule method over gRPC.
+
+        Deletes a backup schedule.
+
+        Returns:
+            Callable[[~.DeleteBackupScheduleRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_backup_schedule' not in self._stubs:
+            self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule',
+                request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_backup_schedule']
+
+    @property
+    def list_backup_schedules(self) -> Callable[
+            [backup_schedule.ListBackupSchedulesRequest],
+            backup_schedule.ListBackupSchedulesResponse]:
+        r"""Return a callable for the list backup schedules method over gRPC.
+
+        Lists all the backup schedules for the database.
+
+        Returns:
+            Callable[[~.ListBackupSchedulesRequest],
+                    ~.ListBackupSchedulesResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_backup_schedules' not in self._stubs:
+            self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules',
+                request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize,
+                response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize,
+            )
+        return self._stubs['list_backup_schedules']
+
+    @property
+    def internal_update_graph_operation(self) -> Callable[
+            [spanner_database_admin.InternalUpdateGraphOperationRequest],
+            spanner_database_admin.InternalUpdateGraphOperationResponse]:
+        r"""Return a callable for the internal update graph
+        operation method over gRPC.
+
+        This is an internal API called by Spanner Graph jobs.
+        You should never need to call this API directly.
+
+        Returns:
+            Callable[[~.InternalUpdateGraphOperationRequest],
+                    ~.InternalUpdateGraphOperationResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'internal_update_graph_operation' not in self._stubs:
+            self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation',
+                request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize,
+                response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize,
+            )
+        return self._stubs['internal_update_graph_operation']
+
+    def close(self):
+        self._logged_channel.close()
+
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
+        r"""Return a callable for the list_operations method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
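+        # An illustrative sketch (hypothetical operation name). These mixin
+        # callables take the raw ``google.longrunning`` request protos
+        # directly:
+        #
+        #     request = operations_pb2.ListOperationsRequest(
+        #         name="projects/p/instances/i/databases/d/operations",
+        #     )
+        #     response = transport.list_operations(request)
+        #     for operation in response.operations:
+        #         print(operation.name, operation.done)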
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DatabaseAdminGrpcTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py new file mode 100644 index 0000000000..06cfaa6349 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -0,0 +1,1656 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO +from .grpc import DatabaseAdminGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and 
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert gRPC metadata `<Tuple[Tuple[str, str]]>` to list of tuples
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class DatabaseAdminGrpcAsyncIOTransport(DatabaseAdminTransport):
+    """gRPC AsyncIO backend transport for DatabaseAdmin.
+
+    Cloud Spanner Database Admin API
+
+    The Cloud Spanner Database Admin API can be used to:
+
+    - create, drop, and list databases
+    - update the schema of pre-existing databases
+    - create, delete, copy and list backups for a database
+    - restore a database from an existing backup
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'spanner.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        self._interceptor = _LoggingClientAIOInterceptor()
+        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+        self._logged_channel = self._grpc_channel
+        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsAsyncClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Quick check: Only create a new client if we do not already have one.
+        if self._operations_client is None:
+            self._operations_client = operations_v1.OperationsAsyncClient(
+                self._logged_channel
+            )
+
+        # Return the client from cache.
+        return self._operations_client
+
+    @property
+    def list_databases(self) -> Callable[
+            [spanner_database_admin.ListDatabasesRequest],
+            Awaitable[spanner_database_admin.ListDatabasesResponse]]:
+        r"""Return a callable for the list databases method over gRPC.
+
+        Lists Cloud Spanner databases.
+
+        Returns:
+            Callable[[~.ListDatabasesRequest],
+                    Awaitable[~.ListDatabasesResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_databases' not in self._stubs:
+            self._stubs['list_databases'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases',
+                request_serializer=spanner_database_admin.ListDatabasesRequest.serialize,
+                response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize,
+            )
+        return self._stubs['list_databases']
+
+    @property
+    def create_database(self) -> Callable[
+            [spanner_database_admin.CreateDatabaseRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the create database method over gRPC.
+
+        Creates a new Cloud Spanner database and starts to prepare it
+        for serving. The returned [long-running
+        operation][google.longrunning.Operation] will have a name of the
+        format ``<database_name>/operations/<operation_id>`` and can be
+        used to track preparation of the database. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Database][google.spanner.admin.database.v1.Database], if
+        successful.
+
+        Returns:
+            Callable[[~.CreateDatabaseRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
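+        # An illustrative async sketch (hypothetical names). Unlike the sync
+        # transport, the returned callable yields an awaitable:
+        #
+        #     async def create(transport):
+        #         request = spanner_database_admin.CreateDatabaseRequest(
+        #             parent="projects/my-project/instances/my-instance",
+        #             create_statement="CREATE DATABASE `my-database`",
+        #         )
+        #         operation = await transport.create_database(request)
+        #         return operation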
+        if 'create_database' not in self._stubs:
+            self._stubs['create_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase',
+                request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_database']
+
+    @property
+    def get_database(self) -> Callable[
+            [spanner_database_admin.GetDatabaseRequest],
+            Awaitable[spanner_database_admin.Database]]:
+        r"""Return a callable for the get database method over gRPC.
+
+        Gets the state of a Cloud Spanner database.
+
+        Returns:
+            Callable[[~.GetDatabaseRequest],
+                    Awaitable[~.Database]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_database' not in self._stubs:
+            self._stubs['get_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase',
+                request_serializer=spanner_database_admin.GetDatabaseRequest.serialize,
+                response_deserializer=spanner_database_admin.Database.deserialize,
+            )
+        return self._stubs['get_database']
+
+    @property
+    def update_database(self) -> Callable[
+            [spanner_database_admin.UpdateDatabaseRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the update database method over gRPC.
+
+        Updates a Cloud Spanner database. The returned [long-running
+        operation][google.longrunning.Operation] can be used to track
+        the progress of updating the database. If the named database
+        does not exist, returns ``NOT_FOUND``.
+
+        While the operation is pending:
+
+        - The database's
+          [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+          field is set to true.
+        - Cancelling the operation is best-effort. If the cancellation
+          succeeds, the operation metadata's
+          [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time]
+          is set, the updates are reverted, and the operation terminates
+          with a ``CANCELLED`` status.
+        - New UpdateDatabase requests will return a
+          ``FAILED_PRECONDITION`` error until the pending operation is
+          done (returns successfully or with error).
+        - Reading the database via the API continues to give the
+          pre-request values.
+
+        Upon completion of the returned operation:
+
+        - The new values are in effect and readable via the API.
+        - The database's
+          [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+          field becomes false.
+
+        The returned [long-running
+        operation][google.longrunning.Operation] will have a name of the
+        format
+        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``
+        and can be used to track the database modification. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Database][google.spanner.admin.database.v1.Database], if
+        successful.
+
+        Returns:
+            Callable[[~.UpdateDatabaseRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
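+        # An illustrative sketch (hypothetical names; ``enable_drop_protection``
+        # is used here as an example of a mutable field and is an assumption,
+        # not taken from this file). ``UpdateDatabase`` is a field-mask update:
+        #
+        #     from google.protobuf import field_mask_pb2
+        #
+        #     request = spanner_database_admin.UpdateDatabaseRequest(
+        #         database=spanner_database_admin.Database(
+        #             name="projects/p/instances/i/databases/d",
+        #             enable_drop_protection=True,
+        #         ),
+        #         update_mask=field_mask_pb2.FieldMask(paths=["enable_drop_protection"]),
+        #     )
+        #     operation = await transport.update_database(request)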
+        if 'update_database' not in self._stubs:
+            self._stubs['update_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase',
+                request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_database']
+
+    @property
+    def update_database_ddl(self) -> Callable[
+            [spanner_database_admin.UpdateDatabaseDdlRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the update database ddl method over gRPC.
+
+        Updates the schema of a Cloud Spanner database by
+        creating/altering/dropping tables, columns, indexes, etc. The
+        returned [long-running operation][google.longrunning.Operation]
+        will have a name of the format
+        ``<database_name>/operations/<operation_id>`` and can be used to
+        track execution of the schema change(s). The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata].
+        The operation has no response.
+
+        Returns:
+            Callable[[~.UpdateDatabaseDdlRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_database_ddl' not in self._stubs:
+            self._stubs['update_database_ddl'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl',
+                request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_database_ddl']
+
+    @property
+    def drop_database(self) -> Callable[
+            [spanner_database_admin.DropDatabaseRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the drop database method over gRPC.
+
+        Drops (aka deletes) a Cloud Spanner database. Completed backups
+        for the database will be retained according to their
+        ``expire_time``. Note: Cloud Spanner might continue to accept
+        requests for a few seconds after the database has been deleted.
+
+        Returns:
+            Callable[[~.DropDatabaseRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'drop_database' not in self._stubs:
+            self._stubs['drop_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase',
+                request_serializer=spanner_database_admin.DropDatabaseRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['drop_database']
+
+    @property
+    def get_database_ddl(self) -> Callable[
+            [spanner_database_admin.GetDatabaseDdlRequest],
+            Awaitable[spanner_database_admin.GetDatabaseDdlResponse]]:
+        r"""Return a callable for the get database ddl method over gRPC.
+
+        Returns the schema of a Cloud Spanner database as a list of
+        formatted DDL statements. This method does not show pending
+        schema updates; those may be queried using the
+        [Operations][google.longrunning.Operations] API.
+
+        Returns:
+            Callable[[~.GetDatabaseDdlRequest],
+                    Awaitable[~.GetDatabaseDdlResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_database_ddl' not in self._stubs:
+            self._stubs['get_database_ddl'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl',
+                request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize,
+                response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize,
+            )
+        return self._stubs['get_database_ddl']
+
+    @property
+    def set_iam_policy(self) -> Callable[
+            [iam_policy_pb2.SetIamPolicyRequest],
+            Awaitable[policy_pb2.Policy]]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the access control policy on a database or backup resource.
+        Replaces any existing policy.
+
+        Authorization requires ``spanner.databases.setIamPolicy``
+        permission on
+        [resource][google.iam.v1.SetIamPolicyRequest.resource]. For
+        backups, authorization requires ``spanner.backups.setIamPolicy``
+        permission on
+        [resource][google.iam.v1.SetIamPolicyRequest.resource].
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    Awaitable[~.Policy]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'set_iam_policy' not in self._stubs:
+            self._stubs['set_iam_policy'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy',
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs['set_iam_policy']
+
+    @property
+    def get_iam_policy(self) -> Callable[
+            [iam_policy_pb2.GetIamPolicyRequest],
+            Awaitable[policy_pb2.Policy]]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the access control policy for a database or backup
+        resource. Returns an empty policy if a database or backup exists
+        but does not have a policy set.
+
+        Authorization requires ``spanner.databases.getIamPolicy``
+        permission on
+        [resource][google.iam.v1.GetIamPolicyRequest.resource]. For
+        backups, authorization requires ``spanner.backups.getIamPolicy``
+        permission on
+        [resource][google.iam.v1.GetIamPolicyRequest.resource].
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    Awaitable[~.Policy]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
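+        # An illustrative sketch (hypothetical resource name). IAM methods
+        # take the shared ``google.iam.v1`` request protos:
+        #
+        #     request = iam_policy_pb2.GetIamPolicyRequest(
+        #         resource="projects/p/instances/i/databases/d",
+        #     )
+        #     policy = await transport.get_iam_policy(request)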
+        if 'get_iam_policy' not in self._stubs:
+            self._stubs['get_iam_policy'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy',
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs['get_iam_policy']
+
+    @property
+    def test_iam_permissions(self) -> Callable[
+            [iam_policy_pb2.TestIamPermissionsRequest],
+            Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Returns permissions that the caller has on the specified
+        database or backup resource.
+
+        Attempting this RPC on a non-existent Cloud Spanner database
+        will result in a NOT_FOUND error if the user has
+        ``spanner.databases.list`` permission on the containing Cloud
+        Spanner instance. Otherwise returns an empty set of permissions.
+        Calling this method on a backup that does not exist will result
+        in a NOT_FOUND error if the user has ``spanner.backups.list``
+        permission on the containing instance.
+
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    Awaitable[~.TestIamPermissionsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'test_iam_permissions' not in self._stubs:
+            self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions',
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs['test_iam_permissions']
+
+    @property
+    def create_backup(self) -> Callable[
+            [gsad_backup.CreateBackupRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the create backup method over gRPC.
+
+        Starts creating a new Cloud Spanner Backup. The returned backup
+        [long-running operation][google.longrunning.Operation] will have
+        a name of the format
+        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+        and can be used to track creation of the backup. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Backup][google.spanner.admin.database.v1.Backup], if
+        successful. Cancelling the returned operation will stop the
+        creation and delete the backup. There can be only one pending
+        backup creation per database. Backup creation of different
+        databases can run concurrently.
+
+        Returns:
+            Callable[[~.CreateBackupRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
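+        # Illustrative call via the async admin client, assuming hypothetical
+        # ``parent``/``backup_id``/``backup`` values (the stub below is what
+        # ultimately carries the request over the wire):
+        #
+        #     operation = await client.create_backup(
+        #         parent=parent, backup_id=backup_id, backup=backup)
+        #     result = await operation.result()  # a Backup message on success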
+        if 'create_backup' not in self._stubs:
+            self._stubs['create_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup',
+                request_serializer=gsad_backup.CreateBackupRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_backup']
+
+    @property
+    def copy_backup(self) -> Callable[
+            [backup.CopyBackupRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the copy backup method over gRPC.
+
+        Starts copying a Cloud Spanner Backup. The returned backup
+        [long-running operation][google.longrunning.Operation] will have
+        a name of the format
+        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+        and can be used to track copying of the backup. The operation is
+        associated with the destination backup. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Backup][google.spanner.admin.database.v1.Backup], if
+        successful. Cancelling the returned operation will stop the
+        copying and delete the destination backup. Concurrent CopyBackup
+        requests can run on the same source backup.
+
+        Returns:
+            Callable[[~.CopyBackupRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'copy_backup' not in self._stubs:
+            self._stubs['copy_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup',
+                request_serializer=backup.CopyBackupRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['copy_backup']
+
+    @property
+    def get_backup(self) -> Callable[
+            [backup.GetBackupRequest],
+            Awaitable[backup.Backup]]:
+        r"""Return a callable for the get backup method over gRPC.
+
+        Gets metadata on a pending or completed
+        [Backup][google.spanner.admin.database.v1.Backup].
+
+        Returns:
+            Callable[[~.GetBackupRequest],
+                    Awaitable[~.Backup]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_backup' not in self._stubs:
+            self._stubs['get_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup',
+                request_serializer=backup.GetBackupRequest.serialize,
+                response_deserializer=backup.Backup.deserialize,
+            )
+        return self._stubs['get_backup']
+
+    @property
+    def update_backup(self) -> Callable[
+            [gsad_backup.UpdateBackupRequest],
+            Awaitable[gsad_backup.Backup]]:
+        r"""Return a callable for the update backup method over gRPC.
+
+        Updates a pending or completed
+        [Backup][google.spanner.admin.database.v1.Backup].
+
+        Returns:
+            Callable[[~.UpdateBackupRequest],
+                    Awaitable[~.Backup]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_backup' not in self._stubs:
+            self._stubs['update_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup',
+                request_serializer=gsad_backup.UpdateBackupRequest.serialize,
+                response_deserializer=gsad_backup.Backup.deserialize,
+            )
+        return self._stubs['update_backup']
+
+    @property
+    def delete_backup(self) -> Callable[
+            [backup.DeleteBackupRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete backup method over gRPC.
+
+        Deletes a pending or completed
+        [Backup][google.spanner.admin.database.v1.Backup].
+
+        Returns:
+            Callable[[~.DeleteBackupRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_backup' not in self._stubs:
+            self._stubs['delete_backup'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup',
+                request_serializer=backup.DeleteBackupRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_backup']
+
+    @property
+    def list_backups(self) -> Callable[
+            [backup.ListBackupsRequest],
+            Awaitable[backup.ListBackupsResponse]]:
+        r"""Return a callable for the list backups method over gRPC.
+
+        Lists completed and pending backups. Backups returned are
+        ordered by ``create_time`` in descending order, starting from
+        the most recent ``create_time``.
+
+        Returns:
+            Callable[[~.ListBackupsRequest],
+                    Awaitable[~.ListBackupsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_backups' not in self._stubs:
+            self._stubs['list_backups'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups',
+                request_serializer=backup.ListBackupsRequest.serialize,
+                response_deserializer=backup.ListBackupsResponse.deserialize,
+            )
+        return self._stubs['list_backups']
+
+    @property
+    def restore_database(self) -> Callable[
+            [spanner_database_admin.RestoreDatabaseRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the restore database method over gRPC.
+
+        Create a new database by restoring from a completed backup. The
+        new database must be in the same project and in an instance with
+        the same instance configuration as the instance containing the
+        backup. The returned database [long-running
+        operation][google.longrunning.Operation] has a name of the
+        format
+        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``,
+        and can be used to track the progress of the operation, and to
+        cancel it. The [metadata][google.longrunning.Operation.metadata]
+        field type is
+        [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata].
+        The [response][google.longrunning.Operation.response] type is
+        [Database][google.spanner.admin.database.v1.Database], if
+        successful. Cancelling the returned operation will stop the
+        restore and delete the database. There can be only one database
+        being restored into an instance at a time. Once the restore
+        operation completes, a new restore operation can be initiated,
+        without waiting for the optimize operation associated with the
+        first restore to complete.
+
+        Returns:
+            Callable[[~.RestoreDatabaseRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'restore_database' not in self._stubs:
+            self._stubs['restore_database'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase',
+                request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['restore_database']
+
+    @property
+    def list_database_operations(self) -> Callable[
+            [spanner_database_admin.ListDatabaseOperationsRequest],
+            Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]]:
+        r"""Return a callable for the list database operations method over gRPC.
+
+        Lists database
+        [longrunning-operations][google.longrunning.Operation]. A
+        database operation has a name of the form
+        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``.
+        The long-running operation
+        [metadata][google.longrunning.Operation.metadata] field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations.
+
+        Returns:
+            Callable[[~.ListDatabaseOperationsRequest],
+                    Awaitable[~.ListDatabaseOperationsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_database_operations' not in self._stubs:
+            self._stubs['list_database_operations'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations',
+                request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize,
+                response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize,
+            )
+        return self._stubs['list_database_operations']
+
+    @property
+    def list_backup_operations(self) -> Callable[
+            [backup.ListBackupOperationsRequest],
+            Awaitable[backup.ListBackupOperationsResponse]]:
+        r"""Return a callable for the list backup operations method over gRPC.
+
+        Lists the backup [long-running
+        operations][google.longrunning.Operation] in the given instance.
+        A backup operation has a name of the form
+        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation>``.
+        The long-running operation
+        [metadata][google.longrunning.Operation.metadata] field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations. Operations returned are ordered by
+        ``operation.metadata.value.progress.start_time`` in descending
+        order starting from the most recently started operation.
+
+        Returns:
+            Callable[[~.ListBackupOperationsRequest],
+                    Awaitable[~.ListBackupOperationsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_operations' not in self._stubs: + self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + request_serializer=backup.ListBackupOperationsRequest.serialize, + response_deserializer=backup.ListBackupOperationsResponse.deserialize, + ) + return self._stubs['list_backup_operations'] + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + Awaitable[spanner_database_admin.ListDatabaseRolesResponse]]: + r"""Return a callable for the list database roles method over gRPC. + + Lists Cloud Spanner database roles. + + Returns: + Callable[[~.ListDatabaseRolesRequest], + Awaitable[~.ListDatabaseRolesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_database_roles' not in self._stubs: + self._stubs['list_database_roles'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', + request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, + ) + return self._stubs['list_database_roles'] + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + Awaitable[spanner_database_admin.AddSplitPointsResponse]]: + r"""Return a callable for the add split points method over gRPC. + + Adds split points to specified tables, indexes of a + database. + + Returns: + Callable[[~.AddSplitPointsRequest], + Awaitable[~.AddSplitPointsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'add_split_points' not in self._stubs: + self._stubs['add_split_points'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints', + request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize, + response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize, + ) + return self._stubs['add_split_points'] + + @property + def create_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + Awaitable[gsad_backup_schedule.BackupSchedule]]: + r"""Return a callable for the create backup schedule method over gRPC. + + Creates a new backup schedule. + + Returns: + Callable[[~.CreateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_backup_schedule' not in self._stubs: + self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', + request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['create_backup_schedule'] + + @property + def get_backup_schedule(self) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + Awaitable[backup_schedule.BackupSchedule]]: + r"""Return a callable for the get backup schedule method over gRPC. + + Gets backup schedule for the input schedule name. + + Returns: + Callable[[~.GetBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup_schedule' not in self._stubs: + self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', + request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, + response_deserializer=backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['get_backup_schedule'] + + @property + def update_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + Awaitable[gsad_backup_schedule.BackupSchedule]]: + r"""Return a callable for the update backup schedule method over gRPC. + + Updates a backup schedule. + + Returns: + Callable[[~.UpdateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_backup_schedule' not in self._stubs: + self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', + request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['update_backup_schedule'] + + @property + def delete_backup_schedule(self) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete backup schedule method over gRPC. + + Deletes a backup schedule. + + Returns: + Callable[[~.DeleteBackupScheduleRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_backup_schedule' not in self._stubs: + self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', + request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup_schedule'] + + @property + def list_backup_schedules(self) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + Awaitable[backup_schedule.ListBackupSchedulesResponse]]: + r"""Return a callable for the list backup schedules method over gRPC. + + Lists all the backup schedules for the database. + + Returns: + Callable[[~.ListBackupSchedulesRequest], + Awaitable[~.ListBackupSchedulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_schedules' not in self._stubs: + self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', + request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, + response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, + ) + return self._stubs['list_backup_schedules'] + + @property + def internal_update_graph_operation(self) -> Callable[ + [spanner_database_admin.InternalUpdateGraphOperationRequest], + Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse]]: + r"""Return a callable for the internal update graph + operation method over gRPC. + + This is an internal API called by Spanner Graph jobs. + You should never need to call this API directly. + + Returns: + Callable[[~.InternalUpdateGraphOperationRequest], + Awaitable[~.InternalUpdateGraphOperationResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'internal_update_graph_operation' not in self._stubs: + self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation', + request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize, + response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize, + ) + return self._stubs['internal_update_graph_operation'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_databases: self._wrap_method( + self.list_databases, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_database: self._wrap_method( + self.create_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database: self._wrap_method( + self.get_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database: self._wrap_method( + self.update_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database_ddl: self._wrap_method( + self.update_database_ddl, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.drop_database: self._wrap_method( + self.drop_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database_ddl: self._wrap_method( + self.get_database_ddl, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.create_backup: self._wrap_method( + self.create_backup, + 
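+                # A note on the AsyncRetry blocks above: google.api_core retries
+                # use capped exponential backoff, so the n-th delay is roughly
+                # min(initial * multiplier**n, maximum), i.e. about 1.0s, 1.3s,
+                # 1.69s, 2.2s, and so on, capped at 32.0s (jitter applies),
+                # retrying only DeadlineExceeded/ServiceUnavailable until the
+                # 3600.0s overall deadline is exhausted.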
default_timeout=3600.0, + client_info=client_info, + ), + self.copy_backup: self._wrap_method( + self.copy_backup, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup: self._wrap_method( + self.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup: self._wrap_method( + self.update_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup: self._wrap_method( + self.delete_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backups: self._wrap_method( + self.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.restore_database: self._wrap_method( + self.restore_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_operations: self._wrap_method( + self.list_database_operations, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_operations: self._wrap_method( + self.list_backup_operations, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_roles: self._wrap_method( + self.list_database_roles, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.add_split_points: self._wrap_method( + self.add_split_points, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_backup_schedule: self._wrap_method( + self.create_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup_schedule: self._wrap_method( + 
self.get_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup_schedule: self._wrap_method( + self.update_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup_schedule: self._wrap_method( + self.delete_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_schedules: self._wrap_method( + self.list_backup_schedules, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.internal_update_graph_operation: self._wrap_method( + self.internal_update_graph_operation, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
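+        # These ``*_operation`` properties expose the generic
+        # google.longrunning.Operations mixin, which is how the LRO-returning
+        # admin RPCs (CreateBackup, RestoreDatabase, etc.) can be polled or
+        # cancelled. Sketch, with ``op_name`` as a hypothetical operation name:
+        #
+        #     op = await transport.get_operation(
+        #         operations_pb2.GetOperationRequest(name=op_name))
+        #     if not op.done:
+        #         await transport.cancel_operation(
+        #             operations_pb2.CancelOperationRequest(name=op_name))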
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'DatabaseAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py new file mode 100644 index 0000000000..012c64468c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py @@ -0,0 +1,5336 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseDatabaseAdminRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DatabaseAdminRestInterceptor: + """Interceptor for DatabaseAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DatabaseAdminRestTransport. + + .. 
code-block:: python + class MyCustomDatabaseAdminInterceptor(DatabaseAdminRestInterceptor): + def pre_add_split_points(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_split_points(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_copy_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_copy_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_drop_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_database_ddl(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_database_ddl(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_internal_update_graph_operation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_internal_update_graph_operation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_list_backup_schedules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_schedules(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_database_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_database_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_database_roles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_database_roles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_databases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_database_ddl(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_database_ddl(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DatabaseAdminRestTransport(interceptor=MyCustomDatabaseAdminInterceptor()) + client = DatabaseAdminClient(transport=transport) + + + """ + def pre_add_split_points(self, request: spanner_database_admin.AddSplitPointsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for add_split_points + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_add_split_points(self, response: spanner_database_admin.AddSplitPointsResponse) -> spanner_database_admin.AddSplitPointsResponse: + """Post-rpc interceptor for add_split_points + + DEPRECATED. 
Please use the `post_add_split_points_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_add_split_points` interceptor runs + before the `post_add_split_points_with_metadata` interceptor. + """ + return response + + def post_add_split_points_with_metadata(self, response: spanner_database_admin.AddSplitPointsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_split_points + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_add_split_points_with_metadata` + interceptor in new development instead of the `post_add_split_points` interceptor. + When both interceptors are used, this `post_add_split_points_with_metadata` interceptor runs after the + `post_add_split_points` interceptor. The (possibly modified) response returned by + `post_add_split_points` will be passed to + `post_add_split_points_with_metadata`. + """ + return response, metadata + + def pre_copy_backup(self, request: backup.CopyBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.CopyBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for copy_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_copy_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for copy_backup + + DEPRECATED. Please use the `post_copy_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_copy_backup` interceptor runs + before the `post_copy_backup_with_metadata` interceptor. + """ + return response + + def post_copy_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for copy_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_copy_backup_with_metadata` + interceptor in new development instead of the `post_copy_backup` interceptor. + When both interceptors are used, this `post_copy_backup_with_metadata` interceptor runs after the + `post_copy_backup` interceptor. The (possibly modified) response returned by + `post_copy_backup` will be passed to + `post_copy_backup_with_metadata`. + """ + return response, metadata + + def pre_create_backup(self, request: gsad_backup.CreateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.CreateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. 
+ """ + return request, metadata + + def post_create_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup + + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. + """ + return response + + def post_create_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + + def pre_create_backup_schedule(self, request: gsad_backup_schedule.CreateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.CreateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_create_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: + """Post-rpc interceptor for create_backup_schedule + + DEPRECATED. Please use the `post_create_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_create_backup_schedule` interceptor runs + before the `post_create_backup_schedule_with_metadata` interceptor. + """ + return response + + def post_create_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_create_backup_schedule_with_metadata` + interceptor in new development instead of the `post_create_backup_schedule` interceptor. + When both interceptors are used, this `post_create_backup_schedule_with_metadata` interceptor runs after the + `post_create_backup_schedule` interceptor. The (possibly modified) response returned by + `post_create_backup_schedule` will be passed to + `post_create_backup_schedule_with_metadata`. 
+ """ + return response, metadata + + def pre_create_database(self, request: spanner_database_admin.CreateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_create_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_database + + DEPRECATED. Please use the `post_create_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_create_database` interceptor runs + before the `post_create_database_with_metadata` interceptor. + """ + return response + + def post_create_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_create_database_with_metadata` + interceptor in new development instead of the `post_create_database` interceptor. + When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the + `post_create_database` interceptor. The (possibly modified) response returned by + `post_create_database` will be passed to + `post_create_database_with_metadata`. + """ + return response, metadata + + def pre_delete_backup(self, request: backup.DeleteBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def pre_delete_backup_schedule(self, request: backup_schedule.DeleteBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.DeleteBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def pre_drop_database(self, request: spanner_database_admin.DropDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.DropDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for drop_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def pre_get_backup(self, request: backup.GetBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. 
+ """ + return request, metadata + + def post_get_backup(self, response: backup.Backup) -> backup.Backup: + """Post-rpc interceptor for get_backup + + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. + """ + return response + + def post_get_backup_with_metadata(self, response: backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + + def pre_get_backup_schedule(self, request: backup_schedule.GetBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.GetBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_backup_schedule(self, response: backup_schedule.BackupSchedule) -> backup_schedule.BackupSchedule: + """Post-rpc interceptor for get_backup_schedule + + DEPRECATED. Please use the `post_get_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_backup_schedule` interceptor runs + before the `post_get_backup_schedule_with_metadata` interceptor. + """ + return response + + def post_get_backup_schedule_with_metadata(self, response: backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_schedule_with_metadata` + interceptor in new development instead of the `post_get_backup_schedule` interceptor. + When both interceptors are used, this `post_get_backup_schedule_with_metadata` interceptor runs after the + `post_get_backup_schedule` interceptor. The (possibly modified) response returned by + `post_get_backup_schedule` will be passed to + `post_get_backup_schedule_with_metadata`. 
+ """ + return response, metadata + + def pre_get_database(self, request: spanner_database_admin.GetDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_database(self, response: spanner_database_admin.Database) -> spanner_database_admin.Database: + """Post-rpc interceptor for get_database + + DEPRECATED. Please use the `post_get_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_database` interceptor runs + before the `post_get_database_with_metadata` interceptor. + """ + return response + + def post_get_database_with_metadata(self, response: spanner_database_admin.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_database_with_metadata` + interceptor in new development instead of the `post_get_database` interceptor. + When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the + `post_get_database` interceptor. The (possibly modified) response returned by + `post_get_database` will be passed to + `post_get_database_with_metadata`. + """ + return response, metadata + + def pre_get_database_ddl(self, request: spanner_database_admin.GetDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_database_ddl + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_database_ddl(self, response: spanner_database_admin.GetDatabaseDdlResponse) -> spanner_database_admin.GetDatabaseDdlResponse: + """Post-rpc interceptor for get_database_ddl + + DEPRECATED. Please use the `post_get_database_ddl_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_database_ddl` interceptor runs + before the `post_get_database_ddl_with_metadata` interceptor. + """ + return response + + def post_get_database_ddl_with_metadata(self, response: spanner_database_admin.GetDatabaseDdlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_database_ddl + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_database_ddl_with_metadata` + interceptor in new development instead of the `post_get_database_ddl` interceptor. 
+ When both interceptors are used, this `post_get_database_ddl_with_metadata` interceptor runs after the + `post_get_database_ddl` interceptor. The (possibly modified) response returned by + `post_get_database_ddl` will be passed to + `post_get_database_ddl_with_metadata`. + """ + return response, metadata + + def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_list_backup_operations(self, request: backup.ListBackupOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_backup_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_backup_operations(self, response: backup.ListBackupOperationsResponse) -> backup.ListBackupOperationsResponse: + """Post-rpc interceptor for list_backup_operations + + DEPRECATED. Please use the `post_list_backup_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_backup_operations` interceptor runs + before the `post_list_backup_operations_with_metadata` interceptor. + """ + return response + + def post_list_backup_operations_with_metadata(self, response: backup.ListBackupOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backup_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_list_backup_operations_with_metadata` + interceptor in new development instead of the `post_list_backup_operations` interceptor. + When both interceptors are used, this `post_list_backup_operations_with_metadata` interceptor runs after the + `post_list_backup_operations` interceptor. The (possibly modified) response returned by + `post_list_backup_operations` will be passed to + `post_list_backup_operations_with_metadata`. + """ + return response, metadata + + def pre_list_backups(self, request: backup.ListBackupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_backups(self, response: backup.ListBackupsResponse) -> backup.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. + """ + return response + + def post_list_backups_with_metadata(self, response: backup.ListBackupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + + def pre_list_backup_schedules(self, request: backup_schedule.ListBackupSchedulesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_backup_schedules + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_backup_schedules(self, response: backup_schedule.ListBackupSchedulesResponse) -> backup_schedule.ListBackupSchedulesResponse: + """Post-rpc interceptor for list_backup_schedules + + DEPRECATED. Please use the `post_list_backup_schedules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_backup_schedules` interceptor runs + before the `post_list_backup_schedules_with_metadata` interceptor. 
+ """ + return response + + def post_list_backup_schedules_with_metadata(self, response: backup_schedule.ListBackupSchedulesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backup_schedules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backup_schedules_with_metadata` + interceptor in new development instead of the `post_list_backup_schedules` interceptor. + When both interceptors are used, this `post_list_backup_schedules_with_metadata` interceptor runs after the + `post_list_backup_schedules` interceptor. The (possibly modified) response returned by + `post_list_backup_schedules` will be passed to + `post_list_backup_schedules_with_metadata`. + """ + return response, metadata + + def pre_list_database_operations(self, request: spanner_database_admin.ListDatabaseOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_database_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_database_operations(self, response: spanner_database_admin.ListDatabaseOperationsResponse) -> spanner_database_admin.ListDatabaseOperationsResponse: + """Post-rpc interceptor for list_database_operations + + DEPRECATED. Please use the `post_list_database_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_database_operations` interceptor runs + before the `post_list_database_operations_with_metadata` interceptor. + """ + return response + + def post_list_database_operations_with_metadata(self, response: spanner_database_admin.ListDatabaseOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_database_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_database_operations_with_metadata` + interceptor in new development instead of the `post_list_database_operations` interceptor. + When both interceptors are used, this `post_list_database_operations_with_metadata` interceptor runs after the + `post_list_database_operations` interceptor. The (possibly modified) response returned by + `post_list_database_operations` will be passed to + `post_list_database_operations_with_metadata`. + """ + return response, metadata + + def pre_list_database_roles(self, request: spanner_database_admin.ListDatabaseRolesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_database_roles + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. 
+ """ + return request, metadata + + def post_list_database_roles(self, response: spanner_database_admin.ListDatabaseRolesResponse) -> spanner_database_admin.ListDatabaseRolesResponse: + """Post-rpc interceptor for list_database_roles + + DEPRECATED. Please use the `post_list_database_roles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_database_roles` interceptor runs + before the `post_list_database_roles_with_metadata` interceptor. + """ + return response + + def post_list_database_roles_with_metadata(self, response: spanner_database_admin.ListDatabaseRolesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_database_roles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_database_roles_with_metadata` + interceptor in new development instead of the `post_list_database_roles` interceptor. + When both interceptors are used, this `post_list_database_roles_with_metadata` interceptor runs after the + `post_list_database_roles` interceptor. The (possibly modified) response returned by + `post_list_database_roles` will be passed to + `post_list_database_roles_with_metadata`. + """ + return response, metadata + + def pre_list_databases(self, request: spanner_database_admin.ListDatabasesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_databases(self, response: spanner_database_admin.ListDatabasesResponse) -> spanner_database_admin.ListDatabasesResponse: + """Post-rpc interceptor for list_databases + + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. + """ + return response + + def post_list_databases_with_metadata(self, response: spanner_database_admin.ListDatabasesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. 
+ """ + return response, metadata + + def pre_restore_database(self, request: spanner_database_admin.RestoreDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.RestoreDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for restore_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_restore_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_database + + DEPRECATED. Please use the `post_restore_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_restore_database` interceptor runs + before the `post_restore_database_with_metadata` interceptor. + """ + return response + + def post_restore_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_restore_database_with_metadata` + interceptor in new development instead of the `post_restore_database` interceptor. + When both interceptors are used, this `post_restore_database_with_metadata` interceptor runs after the + `post_restore_database` interceptor. The (possibly modified) response returned by + `post_restore_database` will be passed to + `post_restore_database_with_metadata`. + """ + return response, metadata + + def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. 
The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + def pre_update_backup(self, request: gsad_backup.UpdateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.UpdateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_backup(self, response: gsad_backup.Backup) -> gsad_backup.Backup: + """Post-rpc interceptor for update_backup + + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. + """ + return response + + def post_update_backup_with_metadata(self, response: gsad_backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. + """ + return response, metadata + + def pre_update_backup_schedule(self, request: gsad_backup_schedule.UpdateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.UpdateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: + """Post-rpc interceptor for update_backup_schedule + + DEPRECATED. Please use the `post_update_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_backup_schedule` interceptor runs + before the `post_update_backup_schedule_with_metadata` interceptor. + """ + return response + + def post_update_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_update_backup_schedule_with_metadata` + interceptor in new development instead of the `post_update_backup_schedule` interceptor. + When both interceptors are used, this `post_update_backup_schedule_with_metadata` interceptor runs after the + `post_update_backup_schedule` interceptor. The (possibly modified) response returned by + `post_update_backup_schedule` will be passed to + `post_update_backup_schedule_with_metadata`. + """ + return response, metadata + + def pre_update_database(self, request: spanner_database_admin.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_database + + DEPRECATED. Please use the `post_update_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_database` interceptor runs + before the `post_update_database_with_metadata` interceptor. 
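+
+        For illustration, a sketch for this LRO-returning method (the
+        subclass name is hypothetical)::
+
+            class OperationLoggingInterceptor(DatabaseAdminRestInterceptor):
+                def post_update_database(self, response):
+                    # response is a google.longrunning Operation proto; its
+                    # server-assigned name can be recorded for later polling.
+                    print(response.name)
+                    return response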
+ """ + return response + + def post_update_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_update_database_with_metadata` + interceptor in new development instead of the `post_update_database` interceptor. + When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the + `post_update_database` interceptor. The (possibly modified) response returned by + `post_update_database` will be passed to + `post_update_database_with_metadata`. + """ + return response, metadata + + def pre_update_database_ddl(self, request: spanner_database_admin.UpdateDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_database_ddl + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_database_ddl(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_database_ddl + + DEPRECATED. Please use the `post_update_database_ddl_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_database_ddl` interceptor runs + before the `post_update_database_ddl_with_metadata` interceptor. + """ + return response + + def post_update_database_ddl_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_database_ddl + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_update_database_ddl_with_metadata` + interceptor in new development instead of the `post_update_database_ddl` interceptor. + When both interceptors are used, this `post_update_database_ddl_with_metadata` interceptor runs after the + `post_update_database_ddl` interceptor. The (possibly modified) response returned by + `post_update_database_ddl` will be passed to + `post_update_database_ddl_with_metadata`. + """ + return response, metadata + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. 
+ """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DatabaseAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DatabaseAdminRestInterceptor + + +class DatabaseAdminRestTransport(_BaseDatabaseAdminRestTransport): + """REST backend synchronous transport for DatabaseAdmin. + + Cloud Spanner Database Admin API + + The Cloud Spanner Database Admin API can be used to: + + - create, drop, and list databases + - update the schema of pre-existing databases + - create, delete, copy and list backups for a database + - restore a database from an existing backup + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[DatabaseAdminRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument will be removed in the next major version of this
+                library.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure the mutual TLS HTTP channel.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme (str): The protocol scheme for the API endpoint. Normally
+                "https", but "http" can be specified for testing or local
+                servers.
+            interceptor (Optional[DatabaseAdminRestInterceptor]): An optional
+                interceptor whose hooks run around each RPC; defaults to a
+                no-op DatabaseAdminRestInterceptor.
+            api_audience (Optional[str]): The intended audience for the
+                supplied credentials, if any.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DatabaseAdminRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
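+        # NOTE: building the OperationsRestTransport below is comparatively
+        # expensive, so the resulting client is cached on the instance and
+        # reused by every subsequent access of this property.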
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _AddSplitPoints(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.AddSplitPoints") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_database_admin.AddSplitPointsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.AddSplitPointsResponse: + r"""Call the add split points method over HTTP. + + Args: + request (~.spanner_database_admin.AddSplitPointsRequest): + The request object. The request for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.AddSplitPointsResponse: + The response for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_http_options() + + request, metadata = self._interceptor.pre_add_split_points(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.AddSplitPoints", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "AddSplitPoints", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._AddSplitPoints._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
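+            # from_http_response maps the status code and error payload to the
+            # matching GoogleAPICallError subclass (for example, 404 -> NotFound).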
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = spanner_database_admin.AddSplitPointsResponse()
+            pb_resp = spanner_database_admin.AddSplitPointsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_add_split_points(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_add_split_points_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = spanner_database_admin.AddSplitPointsResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.add_split_points",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "AddSplitPoints",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _CopyBackup(_BaseDatabaseAdminRestTransport._BaseCopyBackup, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.CopyBackup")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+            return response
+
+        def __call__(self,
+                request: backup.CopyBackupRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the copy backup method over HTTP.
+
+            Args:
+                request (~.backup.CopyBackupRequest):
+                    The request object. The request for
+                    [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
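+
+                    For illustration, one possible polling sketch over the
+                    raw operation (assumes this transport instance is bound
+                    as ``transport``)::
+
+                        import time
+
+                        op = transport.copy_backup(request)
+                        while not op.done:
+                            time.sleep(5)
+                            op = transport.operations_client.get_operation(op.name)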
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_http_options() + + request, metadata = self._interceptor.pre_copy_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CopyBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CopyBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._CopyBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_copy_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_copy_backup_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.copy_backup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CopyBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateBackup(_BaseDatabaseAdminRestTransport._BaseCreateBackup, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.CreateBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gsad_backup.CreateBackupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the 
create backup method over HTTP. + + Args: + request (~.gsad_backup.CreateBackupRequest): + The request object. The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_http_options() + + request, metadata = self._interceptor.pre_create_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CreateBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._CreateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CreateBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.CreateBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gsad_backup_schedule.CreateBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Call the create backup schedule method over HTTP. + + Args: + request (~.gsad_backup_schedule.CreateBackupScheduleRequest): + The request object. The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gsad_backup_schedule.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. 
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_http_options()
+
+            request, metadata = self._interceptor.pre_create_backup_schedule(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_transcoded_request(http_options, request)
+
+            body = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackupSchedule",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "CreateBackupSchedule",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._CreateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gsad_backup_schedule.BackupSchedule()
+            pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_create_backup_schedule(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_create_backup_schedule_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = gsad_backup_schedule.BackupSchedule.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup_schedule",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "CreateBackupSchedule",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _CreateDatabase(_BaseDatabaseAdminRestTransport._BaseCreateDatabase, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.CreateDatabase")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+            return response
+
+        def __call__(self,
+ request: spanner_database_admin.CreateDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create database method over HTTP. + + Args: + request (~.spanner_database_admin.CreateDatabaseRequest): + The request object. The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_http_options() + + request, metadata = self._interceptor.pre_create_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CreateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._CreateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
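+            # For reference, `core_exceptions.from_http_response` (from
+            # `google.api_core.exceptions`) turns an error response into a typed
+            # subclass keyed on the status code, for example 404 -> NotFound,
+            # 409 -> Conflict, 429 -> TooManyRequests, so callers can catch a
+            # specific exception class instead of inspecting status codes.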
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CreateDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteBackup(_BaseDatabaseAdminRestTransport._BaseDeleteBackup, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DeleteBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup.DeleteBackupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete backup method over HTTP. + + Args: + request (~.backup.DeleteBackupRequest): + The request object. The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
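+
+            For illustration only, a metadata sequence mixing a text entry with a
+            binary (``-bin``) entry might look like this (hypothetical values)::
+
+                metadata = (
+                    ("x-goog-request-params", "name=projects/p/instances/i/backups/b"),
+                    ("debug-trace-bin", b"\x00\x01"),
+                )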
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_http_options() + + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DeleteBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DeleteBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteBackupSchedule(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DeleteBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup_schedule.DeleteBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete backup schedule method over HTTP. + + Args: + request (~.backup_schedule.DeleteBackupScheduleRequest): + The request object. The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_http_options() + + request, metadata = self._interceptor.pre_delete_backup_schedule(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackupSchedule", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DeleteBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DeleteBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DropDatabase(_BaseDatabaseAdminRestTransport._BaseDropDatabase, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DropDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.DropDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the drop database method over HTTP. + + Args: + request (~.spanner_database_admin.DropDatabaseRequest): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_http_options() + + request, metadata = self._interceptor.pre_drop_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DropDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DropDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DropDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetBackup(_BaseDatabaseAdminRestTransport._BaseGetBackup, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup.GetBackupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.backup.GetBackupRequest): + The request object. The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backup.Backup: + A backup of a Cloud Spanner database. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_http_options() + + request, metadata = self._interceptor.pre_get_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup.Backup() + pb_resp = backup.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup.Backup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetBackupSchedule(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup_schedule.GetBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup_schedule.BackupSchedule: + r"""Call the get backup schedule method over HTTP. 
+
+            Args:
+                request (~.backup_schedule.GetBackupScheduleRequest):
+                    The request object. The request for
+                    [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.backup_schedule.BackupSchedule:
+                    BackupSchedule expresses the
+                    automated backup creation specification
+                    for a Spanner database.
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_http_options()
+
+            request, metadata = self._interceptor.pre_get_backup_schedule(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except Exception:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    "Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackupSchedule",
+                    extra={
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetBackupSchedule",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._GetBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
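+            # Note on the deserialization below: `BackupSchedule.pb(resp)` returns the
+            # protobuf message wrapped by the proto-plus object `resp`, so parsing the
+            # JSON body into `pb_resp` populates `resp` in place rather than creating
+            # a second object.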
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup_schedule.BackupSchedule() + pb_resp = backup_schedule.BackupSchedule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_backup_schedule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_schedule_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup_schedule.BackupSchedule.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup_schedule", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetBackupSchedule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDatabase(_BaseDatabaseAdminRestTransport._BaseGetDatabase, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.GetDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.Database: + r"""Call the get database method over HTTP. + + Args: + request (~.spanner_database_admin.GetDatabaseRequest): + The request object. The request for + [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.Database: + A Cloud Spanner database. 
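+
+            Database resource names follow the pattern
+            ``projects/<project>/instances/<instance>/databases/<database>``, e.g.
+            (hypothetical)::
+
+                name = "projects/my-project/instances/my-instance/databases/my-db"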
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_http_options() + + request, metadata = self._interceptor.pre_get_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_database_admin.Database() + pb_resp = spanner_database_admin.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_database_admin.Database.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetDatabaseDdl") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.GetDatabaseDdlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + 
r"""Call the get database ddl method over HTTP. + + Args: + request (~.spanner_database_admin.GetDatabaseDdlRequest): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.GetDatabaseDdlResponse: + The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_http_options() + + request, metadata = self._interceptor.pre_get_database_ddl(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabaseDdl", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetDatabaseDdl", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_database_admin.GetDatabaseDdlResponse() + pb_resp = spanner_database_admin.GetDatabaseDdlResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_database_ddl(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_database_ddl_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_database_admin.GetDatabaseDdlResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database_ddl", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetDatabaseDdl", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetIamPolicy(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetIamPolicy", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
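+            # Unlike the GET-based reads above, GetIamPolicy transcodes to a POST
+            # against the `:getIamPolicy` URI with a JSON body, which is why `body`
+            # is threaded through `_get_response` here.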
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = policy_pb2.Policy()
+            # Policy is a plain protobuf message (no proto-plus wrapper), so the JSON
+            # body is parsed into it directly.
+            pb_resp = resp
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_iam_policy(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = json_format.MessageToJson(resp)
+                except Exception:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_iam_policy",
+                    extra={
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetIamPolicy",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _InternalUpdateGraphOperation(_BaseDatabaseAdminRestTransport._BaseInternalUpdateGraphOperation, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.InternalUpdateGraphOperation")
+
+        def __call__(self,
+                request: spanner_database_admin.InternalUpdateGraphOperationRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> spanner_database_admin.InternalUpdateGraphOperationResponse:
+            raise NotImplementedError(
+                "Method InternalUpdateGraphOperation is not available over REST transport"
+            )
+
+    class _ListBackupOperations(_BaseDatabaseAdminRestTransport._BaseListBackupOperations, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.ListBackupOperations")
+
+        @staticmethod
+        def _get_response(
+                host,
+                metadata,
+                query_params,
+                session,
+                timeout,
+                transcoded_request,
+                body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+            return response
+
+        def __call__(self,
+                request: backup.ListBackupOperationsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> backup.ListBackupOperationsResponse:
+            r"""Call the list backup operations method over HTTP.
+
+            Args:
+                request (~.backup.ListBackupOperationsRequest):
+                    The request object. The request for
+                    [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.backup.ListBackupOperationsResponse:
+                    The response for
+                    [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
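+
+            Results are paginated; a follow-up request that passes the returned
+            token back fetches the next page (sketch, assuming a prior response
+            ``resp``)::
+
+                request.page_token = resp.next_page_token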
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_backup_operations(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupOperations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListBackupOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup.ListBackupOperationsResponse() + pb_resp = backup.ListBackupOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_operations_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup.ListBackupOperationsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_operations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackups(_BaseDatabaseAdminRestTransport._BaseListBackups, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListBackups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup.ListBackupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backup.ListBackupsRequest): + The request object. The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backup.ListBackupsResponse: + The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListBackups._get_http_options() + + request, metadata = self._interceptor.pre_list_backups(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackups._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListBackups._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackups", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListBackups._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
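+            # `response_metadata` below re-exposes the HTTP response headers as the
+            # (key, value) pairs expected by the `post_list_backups_with_metadata`
+            # interceptor hook; values are stringified defensively.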
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup.ListBackupsResponse() + pb_resp = backup.ListBackupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup.ListBackupsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backups", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackupSchedules(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListBackupSchedules") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup_schedule.ListBackupSchedulesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup_schedule.ListBackupSchedulesResponse: + r"""Call the list backup schedules method over HTTP. + + Args: + request (~.backup_schedule.ListBackupSchedulesRequest): + The request object. The request for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backup_schedule.ListBackupSchedulesResponse: + The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. 
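+
+            Backup schedules are listed per database, so the request ``parent`` is a
+            database resource name, e.g. (hypothetical)::
+
+                parent = "projects/my-project/instances/my-instance/databases/my-db"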
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_http_options() + + request, metadata = self._interceptor.pre_list_backup_schedules(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupSchedules", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupSchedules", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListBackupSchedules._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup_schedule.ListBackupSchedulesResponse() + pb_resp = backup_schedule.ListBackupSchedulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_schedules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_schedules_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup_schedule.ListBackupSchedulesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_schedules", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupSchedules", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDatabaseOperations(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListDatabaseOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.ListDatabaseOperationsRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.ListDatabaseOperationsResponse: + r"""Call the list database operations method over HTTP. + + Args: + request (~.spanner_database_admin.ListDatabaseOperationsRequest): + The request object. The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.ListDatabaseOperationsResponse: + The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_database_operations(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseOperations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabaseOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListDatabaseOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
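+            # Note: `retry` is accepted for signature compatibility but is not applied
+            # at this layer; retries are handled by the `gapic_v1.method` wrapper above
+            # the transport, while `timeout` is passed straight to the HTTP session.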
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = spanner_database_admin.ListDatabaseOperationsResponse()
+            pb_resp = spanner_database_admin.ListDatabaseOperationsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_database_operations(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_database_operations_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    # Log the parsed proto message (resp), not the raw HTTP
+                    # response object, so the payload actually serializes.
+                    response_payload = spanner_database_admin.ListDatabaseOperationsResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_operations",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "ListDatabaseOperations",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _ListDatabaseRoles(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.ListDatabaseRoles")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: spanner_database_admin.ListDatabaseRolesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> spanner_database_admin.ListDatabaseRolesResponse:
+            r"""Call the list database roles method over HTTP.
+
+            Args:
+                request (~.spanner_database_admin.ListDatabaseRolesRequest):
+                    The request object. The request for
+                    [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.spanner_database_admin.ListDatabaseRolesResponse:
+                    The response for
+                    [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles].
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_http_options()
+
+            request, metadata = self._interceptor.pre_list_database_roles(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseRoles",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "ListDatabaseRoles",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._ListDatabaseRoles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = spanner_database_admin.ListDatabaseRolesResponse()
+            pb_resp = spanner_database_admin.ListDatabaseRolesResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_database_roles(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_database_roles_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    # Log the parsed proto message (resp), not the raw HTTP
+                    # response object, so the payload actually serializes.
+                    response_payload = spanner_database_admin.ListDatabaseRolesResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_roles",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "ListDatabaseRoles",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _ListDatabases(_BaseDatabaseAdminRestTransport._BaseListDatabases, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.ListDatabases")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: spanner_database_admin.ListDatabasesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.ListDatabasesResponse: + r"""Call the list databases method over HTTP. + + Args: + request (~.spanner_database_admin.ListDatabasesRequest): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.ListDatabasesResponse: + The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_http_options() + + request, metadata = self._interceptor.pre_list_databases(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabases", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabases", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListDatabases._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
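+            # A successful response is parsed below into a raw
+            # ListDatabasesResponse; the public client layer wraps it in a
+            # pager (see pagers.py) so callers can iterate across page
+            # boundaries via next_page_token transparently.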
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = spanner_database_admin.ListDatabasesResponse()
+            pb_resp = spanner_database_admin.ListDatabasesResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_databases(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_databases_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    # Log the parsed proto message (resp), not the raw HTTP
+                    # response object, so the payload actually serializes.
+                    response_payload = spanner_database_admin.ListDatabasesResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_databases",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "ListDatabases",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _RestoreDatabase(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.RestoreDatabase")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: spanner_database_admin.RestoreDatabaseRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the restore database method over HTTP.
+
+            Args:
+                request (~.spanner_database_admin.RestoreDatabaseRequest):
+                    The request object. The request for
+                    [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
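+
+                    For this method, the operation's ``metadata`` field contains
+                    [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]
+                    and, on success, its ``response`` field contains the restored
+                    [Database][google.spanner.admin.database.v1.Database].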
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_http_options() + + request, metadata = self._interceptor.pre_restore_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.RestoreDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "RestoreDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._RestoreDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_restore_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.restore_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "RestoreDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.SetIamPolicy", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.set_iam_policy", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.TestIamPermissions", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
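+            # Note: the response reports the subset of the requested
+            # permissions that the caller actually holds; an empty
+            # ``permissions`` list means none were granted. Missing
+            # permissions are reported this way rather than as an error.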
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.test_iam_permissions", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateBackup(_BaseDatabaseAdminRestTransport._BaseUpdateBackup, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.UpdateBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gsad_backup.UpdateBackupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> gsad_backup.Backup: + r"""Call the update backup method over HTTP. + + Args: + request (~.gsad_backup.UpdateBackupRequest): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gsad_backup.Backup: + A backup of a Cloud Spanner database. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_http_options() + + request, metadata = self._interceptor.pre_update_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsad_backup.Backup() + pb_resp = gsad_backup.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = gsad_backup.Backup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.UpdateBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gsad_backup_schedule.UpdateBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Call the update backup schedule method over HTTP. + + Args: + request (~.gsad_backup_schedule.UpdateBackupScheduleRequest): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gsad_backup_schedule.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_http_options() + + request, metadata = self._interceptor.pre_update_backup_schedule(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackupSchedule", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
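+            # Note: unlike UpdateDatabase and UpdateDatabaseDdl below, which
+            # return a long-running operations_pb2.Operation, this method
+            # returns the updated BackupSchedule resource directly.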
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gsad_backup_schedule.BackupSchedule()
+            pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_update_backup_schedule(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_update_backup_schedule_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    # Log the parsed proto message (resp), not the raw HTTP
+                    # response object, so the payload actually serializes.
+                    response_payload = gsad_backup_schedule.BackupSchedule.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup_schedule",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "UpdateBackupSchedule",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _UpdateDatabase(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.UpdateDatabase")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: spanner_database_admin.UpdateDatabaseRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the update database method over HTTP.
+
+            Args:
+                request (~.spanner_database_admin.UpdateDatabaseRequest):
+                    The request object. The request for
+                    [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
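+
+                    For this method, the operation's ``metadata`` field contains
+                    [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]
+                    and, on success, its ``response`` field contains the updated
+                    [Database][google.spanner.admin.database.v1.Database].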
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_http_options() + + request, metadata = self._interceptor.pre_update_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.UpdateDatabaseDdl") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_database_admin.UpdateDatabaseDdlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update database ddl method over HTTP. + + Args: + request (~.spanner_database_admin.UpdateDatabaseDdlRequest): + The request object. Enqueues the given DDL statements to be applied, in + order but not necessarily all at once, to the database + schema at some point (or points) in the future. The + server checks that the statements are executable + (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + ``NULL`` value in a column to which ``NOT NULL`` would + be added). If a statement fails, all subsequent + statements in the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_http_options() + + request, metadata = self._interceptor.pre_update_database_ddl(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabaseDdl", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabaseDdl", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
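+            # Note: a successful call returns a long-running Operation whose
+            # metadata field is UpdateDatabaseDdlMetadata; the operation has
+            # no meaningful response payload, so callers typically just wait
+            # for it to reach completion.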
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_database_ddl(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_database_ddl_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database_ddl", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabaseDdl", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + spanner_database_admin.AddSplitPointsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddSplitPoints(self._session, self._host, self._interceptor) # type: ignore + + @property + def copy_backup(self) -> Callable[ + [backup.CopyBackupRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CopyBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_schedule(self) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DropDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + backup.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_schedule(self) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + backup_schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + spanner_database_admin.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + spanner_database_admin.GetDatabaseDdlResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def internal_update_graph_operation(self) -> Callable[ + [spanner_database_admin.InternalUpdateGraphOperationRequest], + spanner_database_admin.InternalUpdateGraphOperationResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InternalUpdateGraphOperation(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + backup.ListBackupOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + backup.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_schedules(self) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + backup_schedule.ListBackupSchedulesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + spanner_database_admin.ListDatabaseOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabaseOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + spanner_database_admin.ListDatabaseRolesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabaseRoles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + spanner_database_admin.ListDatabasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + gsad_backup.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_database(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseDatabaseAdminRestTransport._BaseCancelOperation, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
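+
+            Cancellation is best effort: the server may still complete the
+            operation after this call returns, so clients that need the
+            final outcome should verify it via ``GetOperation``.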
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CancelOperation", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseDatabaseAdminRestTransport._BaseDeleteOperation, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteOperation", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseDatabaseAdminRestTransport._BaseGetOperation, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetOperation", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminAsyncClient.GetOperation", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseDatabaseAdminRestTransport._BaseListOperations, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListOperations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminAsyncClient.ListOperations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'DatabaseAdminRestTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py new file mode 100644 index 0000000000..0cfaa08199 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py @@ -0,0 +1,1378 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseDatabaseAdminRestTransport(DatabaseAdminTransport): + """Base REST backend transport for DatabaseAdmin. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseAddSplitPoints:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = spanner_database_admin.AddSplitPointsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseCopyBackup:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/backups:copy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.CopyBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCopyBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/backups', + 'body': 'backup', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gsad_backup.CreateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupScheduleId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', + 'body': 'backup_schedule', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gsad_backup_schedule.CreateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + 
transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/databases', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.CreateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': 
'/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup_schedule.DeleteBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDropDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.DropDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseDropDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', 
+ 'uri': '/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup_schedule.GetBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.GetDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDatabaseDdl: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.GetDatabaseDdlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, 
str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:getIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:getIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseInternalUpdateGraphOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BaseListBackupOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/backupOperations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.ListBackupOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListBackups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/backups', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
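+            # Editor's note (comment added for clarity, not generator output):
+            # proto3 JSON omits fields that hold their default values, so a
+            # required query-string field can disappear during transcoding.
+            # The update below re-adds any such field with its default value,
+            # and "$alt" pins the wire format to JSON with integer enums.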
query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackups._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListBackupSchedules: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup_schedule.ListBackupSchedulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDatabaseOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/databaseOperations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.ListDatabaseOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDatabaseRoles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.ListDatabaseRolesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDatabases: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/databases', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.ListDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabases._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRestoreDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/databases:restore', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.RestoreDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': 
'/v1/{resource=projects/*/instances/*/backups/*}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:setIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/databaseRoles/*}:testIamPermissions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{backup.name=projects/*/instances/*/backups/*}', + 'body': 'backup', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
gsad_backup.UpdateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}', + 'body': 'backup_schedule', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{database.name=projects/*/instances/*/databases/*}', + 'body': 'database', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.UpdateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
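+            # Editor's note (comment added for clarity, not generator output):
+            # UpdateDatabase requires ``updateMask``, so it appears in
+            # __REQUIRED_FIELDS_DEFAULT_VALUES above and is re-injected here
+            # (as an empty mask) whenever transcoding dropped it from the
+            # query string.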
query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDatabaseDdl: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.UpdateDatabaseDdlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + 
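+        # Editor's sketch (hypothetical values, not generator output): for a
+        # request whose name is "projects/p/instances/i/operations/op", the
+        # transcoding above selects the second URI pattern and yields roughly
+        # {'method': 'delete',
+        #  'uri': '/v1/projects/p/instances/i/operations/op',
+        #  'query_params': {}}.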
@staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseDatabaseAdminRestTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py new file mode 100644 index 0000000000..84cb902d6c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .backup import ( + Backup, + BackupInfo, + BackupInstancePartition, + CopyBackupEncryptionConfig, + CopyBackupMetadata, + CopyBackupRequest, + CreateBackupEncryptionConfig, + CreateBackupMetadata, + CreateBackupRequest, + DeleteBackupRequest, + FullBackupSpec, + GetBackupRequest, + IncrementalBackupSpec, + ListBackupOperationsRequest, + ListBackupOperationsResponse, + ListBackupsRequest, + ListBackupsResponse, + UpdateBackupRequest, +) +from .backup_schedule import ( + BackupSchedule, + BackupScheduleSpec, + CreateBackupScheduleRequest, + CrontabSpec, + DeleteBackupScheduleRequest, + GetBackupScheduleRequest, + ListBackupSchedulesRequest, + ListBackupSchedulesResponse, + UpdateBackupScheduleRequest, +) +from .common import ( + EncryptionConfig, + EncryptionInfo, + OperationProgress, + DatabaseDialect, +) +from .spanner_database_admin import ( + AddSplitPointsRequest, + AddSplitPointsResponse, + CreateDatabaseMetadata, + CreateDatabaseRequest, + Database, + DatabaseRole, + DdlStatementActionInfo, + DropDatabaseRequest, + GetDatabaseDdlRequest, + GetDatabaseDdlResponse, + GetDatabaseRequest, + InternalUpdateGraphOperationRequest, + InternalUpdateGraphOperationResponse, + ListDatabaseOperationsRequest, + ListDatabaseOperationsResponse, + ListDatabaseRolesRequest, + ListDatabaseRolesResponse, + ListDatabasesRequest, + ListDatabasesResponse, + OptimizeRestoredDatabaseMetadata, + RestoreDatabaseEncryptionConfig, + RestoreDatabaseMetadata, + RestoreDatabaseRequest, + RestoreInfo, + SplitPoints, + UpdateDatabaseDdlMetadata, + UpdateDatabaseDdlRequest, + UpdateDatabaseMetadata, + UpdateDatabaseRequest, + RestoreSourceType, +) + +__all__ = ( + 'Backup', + 'BackupInfo', + 'BackupInstancePartition', + 'CopyBackupEncryptionConfig', + 'CopyBackupMetadata', + 'CopyBackupRequest', + 'CreateBackupEncryptionConfig', + 'CreateBackupMetadata', + 'CreateBackupRequest', + 'DeleteBackupRequest', + 'FullBackupSpec', + 'GetBackupRequest', + 'IncrementalBackupSpec', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'UpdateBackupRequest', + 'BackupSchedule', + 'BackupScheduleSpec', + 'CreateBackupScheduleRequest', + 'CrontabSpec', + 'DeleteBackupScheduleRequest', + 'GetBackupScheduleRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'UpdateBackupScheduleRequest', + 'EncryptionConfig', + 'EncryptionInfo', + 'OperationProgress', + 'DatabaseDialect', + 'AddSplitPointsRequest', + 'AddSplitPointsResponse', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'Database', + 'DatabaseRole', + 'DdlStatementActionInfo', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'GetDatabaseRequest', + 'InternalUpdateGraphOperationRequest', + 'InternalUpdateGraphOperationResponse', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'ListDatabaseRolesRequest', + 'ListDatabaseRolesResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'OptimizeRestoredDatabaseMetadata', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'RestoreDatabaseRequest', + 'RestoreInfo', + 'SplitPoints', + 'UpdateDatabaseDdlMetadata', + 'UpdateDatabaseDdlRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseRequest', + 'RestoreSourceType', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py new file mode 100644 index 
0000000000..d89cfa44b5
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py
@@ -0,0 +1,1097 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.cloud.spanner_admin_database_v1.types import common
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.spanner.admin.database.v1',
+    manifest={
+        'Backup',
+        'CreateBackupRequest',
+        'CreateBackupMetadata',
+        'CopyBackupRequest',
+        'CopyBackupMetadata',
+        'UpdateBackupRequest',
+        'GetBackupRequest',
+        'DeleteBackupRequest',
+        'ListBackupsRequest',
+        'ListBackupsResponse',
+        'ListBackupOperationsRequest',
+        'ListBackupOperationsResponse',
+        'BackupInfo',
+        'CreateBackupEncryptionConfig',
+        'CopyBackupEncryptionConfig',
+        'FullBackupSpec',
+        'IncrementalBackupSpec',
+        'BackupInstancePartition',
+    },
+)
+
+
+class Backup(proto.Message):
+    r"""A backup of a Cloud Spanner database.
+
+    Attributes:
+        database (str):
+            Required for the
+            [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]
+            operation. Name of the database from which this backup was
+            created. This needs to be in the same instance as the
+            backup. Values are of the form
+            ``projects/<project>/instances/<instance>/databases/<database>``.
+        version_time (google.protobuf.timestamp_pb2.Timestamp):
+            The backup will contain an externally consistent copy of the
+            database at the timestamp specified by ``version_time``. If
+            ``version_time`` is not specified, the system will set
+            ``version_time`` to the ``create_time`` of the backup.
+        expire_time (google.protobuf.timestamp_pb2.Timestamp):
+            Required for the
+            [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]
+            operation. The expiration time of the backup, with
+            microseconds granularity that must be at least 6 hours and
+            at most 366 days from the time the CreateBackup request is
+            processed. Once the ``expire_time`` has passed, the backup
+            is eligible to be automatically deleted by Cloud Spanner to
+            free the resources used by the backup.
+        name (str):
+            Output only for the
+            [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]
+            operation. Required for the
+            [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]
+            operation.
+
+            A globally unique identifier for the backup which cannot be
+            changed. Values are of the form
+            ``projects/<project>/instances/<instance>/backups/[a-z][a-z0-9_\-]*[a-z0-9]``
+            The final segment of the name must be between 2 and 60
+            characters in length.
+ + The backup is stored in the location(s) specified in the + instance configuration of the instance containing the + backup, identified by the prefix of the backup name of the + form ``projects/<project>/instances/<instance>``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request is received. If the request does not specify + ``version_time``, the ``version_time`` of the backup will be + equivalent to the ``create_time``. + size_bytes (int): + Output only. Size of the backup in bytes. + freeable_size_bytes (int): + Output only. The number of bytes that will be + freed by deleting this backup. This value will + be zero if, for example, this backup is part of + an incremental backup chain and younger backups + in the chain require that we keep its data. For + backups not in an incremental backup chain, this + is always the size of the backup. This value may + change if backups on the same chain get created, + deleted or expired. + exclusive_size_bytes (int): + Output only. For a backup in an incremental + backup chain, this is the storage space needed + to keep the data that has changed since the + previous backup. For all other backups, this is + always the size of the backup. This value may + change if backups on the same chain get deleted + or expired. + + This field can be used to calculate the total + storage space used by a set of backups. For + example, the total space used by all backups of + a database can be computed by summing up this + field. + state (google.cloud.spanner_admin_database_v1.types.Backup.State): + Output only. The current state of the backup. + referencing_databases (MutableSequence[str]): + Output only. The names of the restored databases that + reference the backup. The database names are of the form + ``projects/<project>/instances/<instance>/databases/<database>``. + Referencing databases may exist in different instances. The + existence of any referencing database prevents the backup + from being deleted. When a restored database from the backup + enters the ``READY`` state, the reference to the backup is + removed. + encryption_info (google.cloud.spanner_admin_database_v1.types.EncryptionInfo): + Output only. The encryption information for + the backup. + encryption_information (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): + Output only. The encryption information for the backup, + whether it is protected by one or more KMS keys. The + information includes all Cloud KMS key versions used to + encrypt the backup. The ``encryption_status`` field inside of + each ``EncryptionInfo`` is not populated. At least one of the + key versions must be + available for the backup to be restored. If a key version is + revoked in the middle of a restore, the restore behavior is + undefined. + database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): + Output only. The database dialect information + for the backup. + referencing_backups (MutableSequence[str]): + Output only. The names of the destination backups being + created by copying this source backup. The backup names are + of the form + ``projects/<project>/instances/<instance>/backups/<backup>``. + Referencing backups may exist in different instances. The + existence of any referencing backup prevents the backup from + being deleted. When the copy operation is done (either + successfully completed or cancelled or the destination + backup is deleted), the reference to the backup is removed.
+ max_expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The max allowed expiration time of the backup, + with microseconds granularity. A backup's expiration time + can be configured in multiple APIs: CreateBackup, + UpdateBackup, CopyBackup. When updating or copying an + existing backup, the expiration time specified must be less + than ``Backup.max_expire_time``. + backup_schedules (MutableSequence[str]): + Output only. List of backup schedule URIs + that are associated with creating this backup. + This is only applicable for scheduled backups, + and is empty for on-demand backups. + + To optimize for storage, whenever possible, + multiple schedules are collapsed together to + create one backup. In such cases, this field + captures the list of all backup schedule URIs + that are associated with creating this backup. + If collapsing is not done, then this field + captures the single backup schedule URI + associated with creating this backup. + incremental_backup_chain_id (str): + Output only. Populated only for backups in an incremental + backup chain. Backups share the same chain id if and only if + they belong to the same incremental backup chain. Use this + field to determine which backups are part of the same + incremental backup chain. The ordering of backups in the + chain can be determined by ordering the backup + ``version_time``. + oldest_version_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Data deleted at a time older + than this is guaranteed not to be retained in + order to support this backup. For a backup in an + incremental backup chain, this is the version + time of the oldest backup that exists or ever + existed in the chain. For all other backups, + this is the version time of the backup. This + field can be used to understand what data is + being retained by the backup system. + instance_partitions (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupInstancePartition]): + Output only. The instance partition(s) storing the backup. + + This is the same as the list of the instance partition(s) + that the database had footprint in at the backup's + ``version_time``. + """ + class State(proto.Enum): + r"""Indicates the current state of the backup. + + Values: + STATE_UNSPECIFIED (0): + Not specified. + CREATING (1): + The pending backup is still being created. Operations on the + backup may fail with ``FAILED_PRECONDITION`` in this state. + READY (2): + The backup is complete and ready for use. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + database: str = proto.Field( + proto.STRING, + number=2, + ) + version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + freeable_size_bytes: int = proto.Field( + proto.INT64, + number=15, + ) + exclusive_size_bytes: int = proto.Field( + proto.INT64, + number=16, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + referencing_databases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + encryption_info: common.EncryptionInfo = proto.Field( + proto.MESSAGE, + number=8, + message=common.EncryptionInfo, + ) + encryption_information: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message=common.EncryptionInfo, + ) + database_dialect: common.DatabaseDialect = proto.Field( + proto.ENUM, + number=10, + enum=common.DatabaseDialect, + ) + referencing_backups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + max_expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + backup_schedules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + incremental_backup_chain_id: str = proto.Field( + proto.STRING, + number=17, + ) + oldest_version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) + instance_partitions: MutableSequence['BackupInstancePartition'] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message='BackupInstancePartition', + ) + + +class CreateBackupRequest(proto.Message): + r"""The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + + Attributes: + parent (str): + Required. The name of the instance in which the backup will + be created. This must be the same instance that contains the + database the backup will be created from. The backup will be + stored in the location(s) specified in the instance + configuration of this instance. Values are of the form + ``projects//instances/``. + backup_id (str): + Required. The id of the backup to be created. The + ``backup_id`` appended to ``parent`` forms the full backup + name of the form + ``projects//instances//backups/``. + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to create. + encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): + Optional. The encryption configuration used to encrypt the + backup. If this field is not specified, the backup will use + the same encryption configuration as the database by + default, namely + [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] + = ``USE_DATABASE_ENCRYPTION``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup: 'Backup' = proto.Field( + proto.MESSAGE, + number=3, + message='Backup', + ) + encryption_config: 'CreateBackupEncryptionConfig' = proto.Field( + proto.MESSAGE, + number=4, + message='CreateBackupEncryptionConfig', + ) + + +class CreateBackupMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + + Attributes: + name (str): + The name of the backup being created. + database (str): + The name of the database the backup is + created from. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which cancellation of this operation was + received. + [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] + starts asynchronous cancellation on a long-running + operation. The server makes a best effort to cancel the + operation, but success is not guaranteed. Clients can use + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + or other methods to check whether the cancellation succeeded + or whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; + instead, it becomes an operation with an + [Operation.error][google.longrunning.Operation.error] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=3, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class CopyBackupRequest(proto.Message): + r"""The request for + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. + + Attributes: + parent (str): + Required. The name of the destination instance that will + contain the backup copy. Values are of the form: + ``projects//instances/``. + backup_id (str): + Required. The id of the backup copy. The ``backup_id`` + appended to ``parent`` forms the full backup_uri of the form + ``projects//instances//backups/``. + source_backup (str): + Required. The source backup to be copied. The source backup + needs to be in READY state for it to be copied. Once + CopyBackup is in progress, the source backup cannot be + deleted or cleaned up on expiration until CopyBackup is + finished. Values are of the form: + ``projects//instances//backups/``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The expiration time of the backup in microsecond + granularity. The expiration time must be at least 6 hours + and at most 366 days from the ``create_time`` of the source + backup. Once the ``expire_time`` has passed, the backup is + eligible to be automatically deleted by Cloud Spanner to + free the resources used by the backup. + encryption_config (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig): + Optional. The encryption configuration used to encrypt the + backup. 
If this field is not specified, the backup will use + the same encryption configuration as the source backup by + default, namely + [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] + = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_id: str = proto.Field( + proto.STRING, + number=2, + ) + source_backup: str = proto.Field( + proto.STRING, + number=3, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + encryption_config: 'CopyBackupEncryptionConfig' = proto.Field( + proto.MESSAGE, + number=5, + message='CopyBackupEncryptionConfig', + ) + + +class CopyBackupMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. + + Attributes: + name (str): + The name of the backup being created through the copy + operation. Values are of the form + ``projects//instances//backups/``. + source_backup (str): + The name of the source backup that is being copied. Values + are of the form + ``projects//instances//backups/``. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] + operation. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which cancellation of CopyBackup operation was + received. + [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] + starts asynchronous cancellation on a long-running + operation. The server makes a best effort to cancel the + operation, but success is not guaranteed. Clients can use + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + or other methods to check whether the cancellation succeeded + or whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; + instead, it becomes an operation with an + [Operation.error][google.longrunning.Operation.error] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source_backup: str = proto.Field( + proto.STRING, + number=2, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=3, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateBackupRequest(proto.Message): + r"""The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + + Attributes: + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only supported + for the following fields: + + - ``backup.expire_time``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be updated. + This mask is relative to the Backup resource, not to the + request message. The field mask must always be specified; + this prevents any future fields from being erased + accidentally by clients that do not know about them. 
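A sketch of the corresponding UpdateBackup call, assuming the generated client's flattened signature; per the docstring above, ``expire_time`` is the only field that can currently be updated, and the mask names it explicitly. Resource names are hypothetical.

import datetime

from google.cloud import spanner_admin_database_v1
from google.protobuf import field_mask_pb2

client = spanner_admin_database_v1.DatabaseAdminClient()

backup = spanner_admin_database_v1.Backup(
    name="projects/my-project/instances/my-instance/backups/my-backup",
    expire_time=datetime.datetime.now(datetime.timezone.utc)
    + datetime.timedelta(days=30),
)

# Only fields listed in update_mask are modified; everything else in
# `backup` is ignored by the server.
updated = client.update_backup(
    backup=backup,
    update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
)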
+ """ + + backup: 'Backup' = proto.Field( + proto.MESSAGE, + number=1, + message='Backup', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetBackupRequest(proto.Message): + r"""The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + + Attributes: + name (str): + Required. Name of the backup. Values are of the form + ``projects//instances//backups/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupRequest(proto.Message): + r"""The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + + Attributes: + name (str): + Required. Name of the backup to delete. Values are of the + form + ``projects//instances//backups/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupsRequest(proto.Message): + r"""The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Attributes: + parent (str): + Required. The instance to list backups from. Values are of + the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned backups. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [Backup][google.spanner.admin.database.v1.Backup] are + eligible for filtering: + + - ``name`` + - ``database`` + - ``state`` + - ``create_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``expire_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``version_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``size_bytes`` + - ``backup_schedules`` + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic, but you can specify AND, OR, and + NOT logic explicitly. + + Here are a few examples: + + - ``name:Howl`` - The backup's name contains the string + "howl". + - ``database:prod`` - The database's name contains the + string "prod". + - ``state:CREATING`` - The backup is pending creation. + - ``state:READY`` - The backup is fully created and ready + for use. + - ``(name:howl) AND (create_time < \"2018-03-28T14:50:00Z\")`` + - The backup name contains the string "howl" and + ``create_time`` of the backup is before + 2018-03-28T14:50:00Z. + - ``expire_time < \"2018-03-28T14:50:00Z\"`` - The backup + ``expire_time`` is before 2018-03-28T14:50:00Z. + - ``size_bytes > 10000000000`` - The backup's size is + greater than 10GB + - ``backup_schedules:daily`` - The backup is created from a + schedule with "daily" in its name. + page_size (int): + Number of backups to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupsResponse.next_page_token] + from a previous + [ListBackupsResponse][google.spanner.admin.database.v1.ListBackupsResponse] + to the same ``parent`` and with the same ``filter``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupsResponse(proto.Message): + r"""The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Attributes: + backups (MutableSequence[google.cloud.spanner_admin_database_v1.types.Backup]): + The list of matching backups. Backups returned are ordered + by ``create_time`` in descending order, starting from the + most recent ``create_time``. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups] + call to fetch more of the matching backups. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence['Backup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Backup', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListBackupOperationsRequest(proto.Message): + r"""The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Attributes: + parent (str): + Required. The instance of the backup operations. Values are + of the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned backup + operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [operation][google.longrunning.Operation] are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. For example, + the type string for + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] + is + ``type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first if filtering on + metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic, but you can specify AND, OR, and + NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` + ``metadata.database:prod`` - Returns operations where: + + - The operation's metadata type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + - The source database name of backup contains the string + "prod". 
+ + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` + ``(metadata.name:howl) AND`` + ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Returns operations where: + + - The operation's metadata type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + - The backup name contains the string "howl". + - The operation started before 2018-03-28T14:50:00Z. + - The operation resulted in an error. + + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` + ``(metadata.source_backup:test) AND`` + ``(metadata.progress.start_time < \"2022-01-18T14:50:00Z\") AND`` + ``(error:*)`` - Returns operations where: + + - The operation's metadata type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. + - The source backup name contains the string "test". + - The operation started before 2022-01-18T14:50:00Z. + - The operation resulted in an error. + + - ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` + ``(metadata.database:test_db)) OR`` + ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` + ``(metadata.source_backup:test_bkp)) AND`` + ``(error:*)`` - Returns operations where: + + - The operation's metadata matches either of criteria: + + - The operation's metadata type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] + AND the source database name of the backup contains + the string "test_db" + - The operation's metadata type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata] + AND the source backup name contains the string + "test_bkp" + + - The operation resulted in an error. + page_size (int): + Number of operations to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupOperationsResponse.next_page_token] + from a previous + [ListBackupOperationsResponse][google.spanner.admin.database.v1.ListBackupOperationsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupOperationsResponse(proto.Message): + r"""The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Attributes: + operations (MutableSequence[google.longrunning.operations_pb2.Operation]): + The list of matching backup [long-running + operations][google.longrunning.Operation]. Each operation's + name will be prefixed by the backup's name. The operation's + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that are pending or have + completed/failed/canceled within the last 7 days. Operations + returned are ordered by + ``operation.metadata.value.progress.start_time`` in + descending order starting from the most recently started + operation. 
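A sketch of listing in-flight CreateBackup operations with such a filter (hypothetical resource IDs; note that ``metadata.@type`` must be the first metadata clause, as documented above):

from google.cloud import spanner_admin_database_v1

client = spanner_admin_database_v1.DatabaseAdminClient()

request = spanner_admin_database_v1.ListBackupOperationsRequest(
    parent="projects/my-project/instances/my-instance",
    filter=(
        "(metadata.@type=type.googleapis.com/"
        "google.spanner.admin.database.v1.CreateBackupMetadata) AND "
        "(done:false)"
    ),
)

# Each item is a google.longrunning Operation whose metadata field is a
# packed Any holding CreateBackupMetadata.
for op in client.list_backup_operations(request=request):
    print(op.name, op.done)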
+ next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations] + call to fetch more of the matching metadata. + """ + + @property + def raw_page(self): + return self + + operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class BackupInfo(proto.Message): + r"""Information about a backup. + + Attributes: + backup (str): + Name of the backup. + version_time (google.protobuf.timestamp_pb2.Timestamp): + The backup contains an externally consistent copy of + ``source_database`` at the timestamp specified by + ``version_time``. If the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request did not specify ``version_time``, the + ``version_time`` of the backup is equivalent to the + ``create_time``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request was received. + source_database (str): + Name of the database the backup was created + from. + """ + + backup: str = proto.Field( + proto.STRING, + number=1, + ) + version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + source_database: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateBackupEncryptionConfig(proto.Message): + r"""Encryption configuration for the backup to create. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig.EncryptionType): + Required. The encryption type of the backup. + kms_key_name (str): + Optional. The Cloud KMS key that will be used to protect the + backup. This field should be set only when + [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] + is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Optional. Specifies the KMS configuration for the one or + more keys used to protect the backup. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + + The keys referenced by kms_key_names must fully cover all + regions of the backup's instance configuration. Some + examples: + + - For single region instance configs, specify a single + regional location KMS key. + - For multi-regional instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For an instance config of type USER_MANAGED, please + specify only regional location KMS keys to cover each + region in the instance config. Multi-regional location KMS + keys are not supported for USER_MANAGED instance configs. + """ + class EncryptionType(proto.Enum): + r"""Encryption types for the backup. + + Values: + ENCRYPTION_TYPE_UNSPECIFIED (0): + Unspecified. Do not use. + USE_DATABASE_ENCRYPTION (1): + Use the same encryption configuration as the database. This + is the default option when + [encryption_config][google.spanner.admin.database.v1.CreateBackupEncryptionConfig] + is empty. 
For example, if the database is using + ``Customer_Managed_Encryption``, the backup will be using + the same Cloud KMS key as the database. + GOOGLE_DEFAULT_ENCRYPTION (2): + Use Google default encryption. + CUSTOMER_MANAGED_ENCRYPTION (3): + Use customer managed encryption. If specified, + ``kms_key_name`` must contain a valid Cloud KMS key. + """ + ENCRYPTION_TYPE_UNSPECIFIED = 0 + USE_DATABASE_ENCRYPTION = 1 + GOOGLE_DEFAULT_ENCRYPTION = 2 + CUSTOMER_MANAGED_ENCRYPTION = 3 + + encryption_type: EncryptionType = proto.Field( + proto.ENUM, + number=1, + enum=EncryptionType, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CopyBackupEncryptionConfig(proto.Message): + r"""Encryption configuration for the copied backup. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig.EncryptionType): + Required. The encryption type of the backup. + kms_key_name (str): + Optional. The Cloud KMS key that will be used to protect the + backup. This field should be set only when + [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] + is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Optional. Specifies the KMS configuration for the one or + more keys used to protect the backup. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + Kms keys specified can be in any order. + + The keys referenced by kms_key_names must fully cover all + regions of the backup's instance configuration. Some + examples: + + - For single region instance configs, specify a single + regional location KMS key. + - For multi-regional instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For an instance config of type USER_MANAGED, please + specify only regional location KMS keys to cover each + region in the instance config. Multi-regional location KMS + keys are not supported for USER_MANAGED instance configs. + """ + class EncryptionType(proto.Enum): + r"""Encryption types for the backup. + + Values: + ENCRYPTION_TYPE_UNSPECIFIED (0): + Unspecified. Do not use. + USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1): + This is the default option for + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] + when + [encryption_config][google.spanner.admin.database.v1.CopyBackupEncryptionConfig] + is not specified. For example, if the source backup is using + ``Customer_Managed_Encryption``, the backup will be using + the same Cloud KMS key as the source backup. + GOOGLE_DEFAULT_ENCRYPTION (2): + Use Google default encryption. + CUSTOMER_MANAGED_ENCRYPTION (3): + Use customer managed encryption. If specified, either + ``kms_key_name`` or ``kms_key_names`` must contain valid + Cloud KMS key(s). 
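A sketch of a cross-instance CopyBackup with an explicit customer-managed key, assuming hypothetical resource and key names; omitting ``encryption_config`` would fall back to ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION`` as described above.

import datetime

from google.cloud import spanner_admin_database_v1

client = spanner_admin_database_v1.DatabaseAdminClient()

request = spanner_admin_database_v1.CopyBackupRequest(
    parent="projects/my-project/instances/dst-instance",
    backup_id="my-backup-copy",
    source_backup="projects/my-project/instances/src-instance/backups/my-backup",
    expire_time=datetime.datetime.now(datetime.timezone.utc)
    + datetime.timedelta(days=14),
    encryption_config=spanner_admin_database_v1.CopyBackupEncryptionConfig(
        encryption_type=spanner_admin_database_v1.CopyBackupEncryptionConfig.EncryptionType.CUSTOMER_MANAGED_ENCRYPTION,
        kms_key_name=(
            "projects/my-project/locations/us-central1/"
            "keyRings/my-ring/cryptoKeys/my-key"
        ),
    ),
)

# CopyBackup is a long-running operation, like CreateBackup.
copied = client.copy_backup(request=request).result()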
+ """ + ENCRYPTION_TYPE_UNSPECIFIED = 0 + USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1 + GOOGLE_DEFAULT_ENCRYPTION = 2 + CUSTOMER_MANAGED_ENCRYPTION = 3 + + encryption_type: EncryptionType = proto.Field( + proto.ENUM, + number=1, + enum=EncryptionType, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class FullBackupSpec(proto.Message): + r"""The specification for full backups. + A full backup stores the entire contents of the database at a + given version time. + + """ + + +class IncrementalBackupSpec(proto.Message): + r"""The specification for incremental backup chains. + An incremental backup stores the delta of changes between a + previous backup and the database contents at a given version + time. An incremental backup chain consists of a full backup and + zero or more successive incremental backups. The first backup + created for an incremental backup chain is always a full backup. + + """ + + +class BackupInstancePartition(proto.Message): + r"""Instance partition information for the backup. + + Attributes: + instance_partition (str): + A unique identifier for the instance partition. Values are + of the form + ``projects//instances//instancePartitions/`` + """ + + instance_partition: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py new file mode 100644 index 0000000000..b69e3b08ee --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'BackupScheduleSpec', + 'BackupSchedule', + 'CrontabSpec', + 'CreateBackupScheduleRequest', + 'GetBackupScheduleRequest', + 'DeleteBackupScheduleRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'UpdateBackupScheduleRequest', + }, +) + + +class BackupScheduleSpec(proto.Message): + r"""Defines specifications of the backup schedule. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cron_spec (google.cloud.spanner_admin_database_v1.types.CrontabSpec): + Cron style schedule specification. 
+ + This field is a member of `oneof`_ ``schedule_spec``. + """ + + cron_spec: 'CrontabSpec' = proto.Field( + proto.MESSAGE, + number=1, + oneof='schedule_spec', + message='CrontabSpec', + ) + + +class BackupSchedule(proto.Message): + r"""BackupSchedule expresses the automated backup creation + specification for a Spanner database. + Next ID: 10 + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. Output only for the + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule] + operation. Required for the + [UpdateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule] + operation. A globally unique identifier for the backup + schedule which cannot be changed. Values are of the form + ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/[a-z][a-z0-9_\-]*[a-z0-9]`` + The final segment of the name must be between 2 and 60 + characters in length. + spec (google.cloud.spanner_admin_database_v1.types.BackupScheduleSpec): + Optional. The schedule specification based on + which the backup creations are triggered. + retention_duration (google.protobuf.duration_pb2.Duration): + Optional. The retention duration of a backup + that must be at least 6 hours and at most 366 + days. The backup is eligible to be automatically + deleted once the retention period has elapsed. + encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): + Optional. The encryption configuration that + will be used to encrypt the backup. If this + field is not specified, the backup will use the + same encryption configuration as the database. + full_backup_spec (google.cloud.spanner_admin_database_v1.types.FullBackupSpec): + The schedule creates only full backups. + + This field is a member of `oneof`_ ``backup_type_spec``. + incremental_backup_spec (google.cloud.spanner_admin_database_v1.types.IncrementalBackupSpec): + The schedule creates incremental backup + chains. + + This field is a member of `oneof`_ ``backup_type_spec``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp at which the + schedule was last updated. If the schedule has + never been updated, this field contains the + timestamp when the schedule was first created.
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + spec: 'BackupScheduleSpec' = proto.Field( + proto.MESSAGE, + number=6, + message='BackupScheduleSpec', + ) + retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + encryption_config: backup.CreateBackupEncryptionConfig = proto.Field( + proto.MESSAGE, + number=4, + message=backup.CreateBackupEncryptionConfig, + ) + full_backup_spec: backup.FullBackupSpec = proto.Field( + proto.MESSAGE, + number=7, + oneof='backup_type_spec', + message=backup.FullBackupSpec, + ) + incremental_backup_spec: backup.IncrementalBackupSpec = proto.Field( + proto.MESSAGE, + number=8, + oneof='backup_type_spec', + message=backup.IncrementalBackupSpec, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +class CrontabSpec(proto.Message): + r"""CrontabSpec can be used to specify the version time and + frequency at which the backup should be created. + + Attributes: + text (str): + Required. Textual representation of the crontab. User can + customize the backup frequency and the backup version time + using the cron expression. The version time must be in UTC + timezone. + + The backup will contain an externally consistent copy of the + database at the version time. Allowed frequencies are 12 + hour, 1 day, 1 week and 1 month. Examples of valid cron + specifications: + + - ``0 2/12 * * *`` : every 12 hours at (2, 14) hours past + midnight in UTC. + - ``0 2,14 * * *`` : every 12 hours at (2,14) hours past + midnight in UTC. + - ``0 2 * * *`` : once a day at 2 past midnight in UTC. + - ``0 2 * * 0`` : once a week every Sunday at 2 past + midnight in UTC. + - ``0 2 8 * *`` : once a month on 8th day at 2 past midnight + in UTC. + time_zone (str): + Output only. The time zone of the times in + ``CrontabSpec.text``. Currently only UTC is supported. + creation_window (google.protobuf.duration_pb2.Duration): + Output only. Schedule backups will contain an externally + consistent copy of the database at the version time + specified in ``schedule_spec.cron_spec``. However, Spanner + may not initiate the creation of the scheduled backups at + that version time. Spanner will initiate the creation of + scheduled backups within the time window bounded by the + version_time specified in ``schedule_spec.cron_spec`` and + version_time + ``creation_window``. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + time_zone: str = proto.Field( + proto.STRING, + number=2, + ) + creation_window: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class CreateBackupScheduleRequest(proto.Message): + r"""The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + + Attributes: + parent (str): + Required. The name of the database that this + backup schedule applies to. + backup_schedule_id (str): + Required. The Id to use for the backup schedule. The + ``backup_schedule_id`` appended to ``parent`` forms the full + backup schedule name of the form + ``projects//instances//databases//backupSchedules/``. + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_schedule_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_schedule: 'BackupSchedule' = proto.Field( + proto.MESSAGE, + number=3, + message='BackupSchedule', + ) + + +class GetBackupScheduleRequest(proto.Message): + r"""The request for + [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. + + Attributes: + name (str): + Required. The name of the schedule to retrieve. Values are + of the form + ``projects//instances//databases//backupSchedules/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupScheduleRequest(proto.Message): + r"""The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + + Attributes: + name (str): + Required. The name of the schedule to delete. Values are of + the form + ``projects//instances//databases//backupSchedules/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupSchedulesRequest(proto.Message): + r"""The request for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Attributes: + parent (str): + Required. Database is the parent resource + whose backup schedules should be listed. Values + are of the form + projects//instances//databases/ + page_size (int): + Optional. Number of backup schedules to be + returned in the response. If 0 or less, defaults + to the server's maximum allowed page size. + page_token (str): + Optional. If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupSchedulesResponse.next_page_token] + from a previous + [ListBackupSchedulesResponse][google.spanner.admin.database.v1.ListBackupSchedulesResponse] + to the same ``parent``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupSchedulesResponse(proto.Message): + r"""The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Attributes: + backup_schedules (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupSchedule]): + The list of backup schedules for a database. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules] + call to fetch more of the schedules. + """ + + @property + def raw_page(self): + return self + + backup_schedules: MutableSequence['BackupSchedule'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BackupSchedule', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateBackupScheduleRequest(proto.Message): + r"""The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + + Attributes: + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated as + specified by ``update_mask`` are required. Other fields are + ignored. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + the BackupSchedule resource should be updated. 
+ This mask is relative to the BackupSchedule + resource, not to the request message. The field + mask must always be specified; this prevents any + future fields from being erased accidentally. + """ + + backup_schedule: 'BackupSchedule' = proto.Field( + proto.MESSAGE, + number=1, + message='BackupSchedule', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py new file mode 100644 index 0000000000..40606ba0e5 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'DatabaseDialect', + 'OperationProgress', + 'EncryptionConfig', + 'EncryptionInfo', + }, +) + + +class DatabaseDialect(proto.Enum): + r"""Indicates the dialect type of a database. + + Values: + DATABASE_DIALECT_UNSPECIFIED (0): + Default value. This value will create a database with the + GOOGLE_STANDARD_SQL dialect. + GOOGLE_STANDARD_SQL (1): + GoogleSQL supported SQL. + POSTGRESQL (2): + PostgreSQL supported SQL. + """ + DATABASE_DIALECT_UNSPECIFIED = 0 + GOOGLE_STANDARD_SQL = 1 + POSTGRESQL = 2 + + +class OperationProgress(proto.Message): + r"""Encapsulates progress related information for a Cloud Spanner + long running operation. + + Attributes: + progress_percent (int): + Percent completion of the operation. + Values are between 0 and 100 inclusive. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time the request was received. + end_time (google.protobuf.timestamp_pb2.Timestamp): + If set, the time at which this operation + failed or was completed successfully. + """ + + progress_percent: int = proto.Field( + proto.INT32, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class EncryptionConfig(proto.Message): + r"""Encryption configuration for a Cloud Spanner database. + + Attributes: + kms_key_name (str): + The Cloud KMS key to be used for encrypting and decrypting + the database. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Specifies the KMS configuration for the one or more keys + used to encrypt the database. 
Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + + The keys referenced by kms_key_names must fully cover all + regions of the database instance configuration. Some + examples: + + - For single region database instance configs, specify a + single regional location KMS key. + - For multi-regional database instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For a database instance config of type USER_MANAGED, + please specify only regional location KMS keys to cover + each region in the instance config. Multi-regional + location KMS keys are not supported for USER_MANAGED + instance configs. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class EncryptionInfo(proto.Message): + r"""Encryption information for a Cloud Spanner database or + backup. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.EncryptionInfo.Type): + Output only. The type of encryption. + encryption_status (google.rpc.status_pb2.Status): + Output only. If present, the status of a + recent encrypt/decrypt call on underlying data + for this database or backup. Regardless of + status, data is always encrypted at rest. + kms_key_version (str): + Output only. A Cloud KMS key version that is + being used to protect the database or backup. + """ + class Type(proto.Enum): + r"""Possible encryption types. + + Values: + TYPE_UNSPECIFIED (0): + Encryption type was not specified, though + data at rest remains encrypted. + GOOGLE_DEFAULT_ENCRYPTION (1): + The data is encrypted at rest with a key that + is fully managed by Google. No key version or + status will be populated. This is the default + state. + CUSTOMER_MANAGED_ENCRYPTION (2): + The data is encrypted at rest with a key that is managed by + the customer. The active version of the key. + ``kms_key_version`` will be populated, and + ``encryption_status`` may be populated. + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_DEFAULT_ENCRYPTION = 1 + CUSTOMER_MANAGED_ENCRYPTION = 2 + + encryption_type: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + encryption_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + kms_key_version: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py new file mode 100644 index 0000000000..3d882c8443 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py @@ -0,0 +1,1348 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import common +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'RestoreSourceType', + 'RestoreInfo', + 'Database', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'CreateDatabaseRequest', + 'CreateDatabaseMetadata', + 'GetDatabaseRequest', + 'UpdateDatabaseRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseDdlRequest', + 'DdlStatementActionInfo', + 'UpdateDatabaseDdlMetadata', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'RestoreDatabaseRequest', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'OptimizeRestoredDatabaseMetadata', + 'DatabaseRole', + 'ListDatabaseRolesRequest', + 'ListDatabaseRolesResponse', + 'AddSplitPointsRequest', + 'AddSplitPointsResponse', + 'SplitPoints', + 'InternalUpdateGraphOperationRequest', + 'InternalUpdateGraphOperationResponse', + }, +) + + +class RestoreSourceType(proto.Enum): + r"""Indicates the type of the restore source. + + Values: + TYPE_UNSPECIFIED (0): + No restore associated. + BACKUP (1): + A backup was used as the source of the + restore. + """ + TYPE_UNSPECIFIED = 0 + BACKUP = 1 + + +class RestoreInfo(proto.Message): + r"""Information about the database restore. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): + The type of the restore source. + backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): + Information about the backup used to restore + the database. The backup may no longer exist. + + This field is a member of `oneof`_ ``source_info``. + """ + + source_type: 'RestoreSourceType' = proto.Field( + proto.ENUM, + number=1, + enum='RestoreSourceType', + ) + backup_info: gsad_backup.BackupInfo = proto.Field( + proto.MESSAGE, + number=2, + oneof='source_info', + message=gsad_backup.BackupInfo, + ) + + +class Database(proto.Message): + r"""A Cloud Spanner database. + + Attributes: + name (str): + Required. The name of the database. Values are of the form + ``projects//instances//databases/``, + where ```` is as specified in the + ``CREATE DATABASE`` statement. This name can be passed to + other API methods to identify the database. + state (google.cloud.spanner_admin_database_v1.types.Database.State): + Output only. The current database state. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If exists, the time at which the + database creation started. + restore_info (google.cloud.spanner_admin_database_v1.types.RestoreInfo): + Output only. Applicable only for restored + databases. Contains information about the + restore source. + encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): + Output only. 
For databases that are using + customer managed encryption, this field contains + the encryption configuration for the database. + For databases that are using Google default or + other types of encryption, this field is empty. + encryption_info (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): + Output only. For databases that are using customer managed + encryption, this field contains the encryption information + for the database, such as all Cloud KMS key versions that + are in use. The ``encryption_status`` field inside of each + ``EncryptionInfo`` is not populated. + + For databases that are using Google default or other types + of encryption, this field is empty. + + This field is propagated lazily from the backend. There + might be a delay from when a key version is being used and + when it appears in this field. + version_retention_period (str): + Output only. The period in which Cloud Spanner retains all + versions of data for the database. This is the same as the + value of version_retention_period database option set using + [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. + Defaults to 1 hour, if not set. + earliest_version_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Earliest timestamp at which + older versions of the data can be read. This + value is continuously updated by Cloud Spanner + and becomes stale the moment it is queried. If + you are using this value to recover data, make + sure to account for the time from the moment + when the value is queried to the moment when you + initiate the recovery. + default_leader (str): + Output only. The read-write region which contains the + database's leader replicas. + + This is the same as the value of default_leader database + option set using DatabaseAdmin.CreateDatabase or + DatabaseAdmin.UpdateDatabaseDdl. If not explicitly set, this + is empty. + database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): + Output only. The dialect of the Cloud Spanner + Database. + enable_drop_protection (bool): + Whether drop protection is enabled for this database. + Defaults to false, if not set. For more details, please see + how to `prevent accidental database + deletion `__. + reconciling (bool): + Output only. If true, the database is being + updated. If false, there are no ongoing update + operations for the database. + """ + class State(proto.Enum): + r"""Indicates the current state of the database. + + Values: + STATE_UNSPECIFIED (0): + Not specified. + CREATING (1): + The database is still being created. Operations on the + database may fail with ``FAILED_PRECONDITION`` in this + state. + READY (2): + The database is fully created and ready for + use. + READY_OPTIMIZING (3): + The database is fully created and ready for use, but is + still being optimized for performance and cannot handle full + load. + + In this state, the database still references the backup it + was restored from, preventing the backup from being deleted. + When optimizations are complete, the full performance of the + database will be restored, and the database will transition + to ``READY`` state.
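A sketch of inspecting these states after a restore, with a hypothetical database name; READY_OPTIMIZING still serves traffic, but the source backup stays pinned until optimization finishes.

from google.cloud import spanner_admin_database_v1

client = spanner_admin_database_v1.DatabaseAdminClient()

db = client.get_database(
    name="projects/my-project/instances/my-instance/databases/my-database"
)

State = spanner_admin_database_v1.Database.State
if db.state == State.READY_OPTIMIZING:
    # Usable, but post-restore optimization is still running and the
    # source backup cannot be deleted yet.
    print("serving; optimization in progress")
elif db.state == State.READY:
    print("fully ready")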
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + READY_OPTIMIZING = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + restore_info: 'RestoreInfo' = proto.Field( + proto.MESSAGE, + number=4, + message='RestoreInfo', + ) + encryption_config: common.EncryptionConfig = proto.Field( + proto.MESSAGE, + number=5, + message=common.EncryptionConfig, + ) + encryption_info: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=common.EncryptionInfo, + ) + version_retention_period: str = proto.Field( + proto.STRING, + number=6, + ) + earliest_version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + default_leader: str = proto.Field( + proto.STRING, + number=9, + ) + database_dialect: common.DatabaseDialect = proto.Field( + proto.ENUM, + number=10, + enum=common.DatabaseDialect, + ) + enable_drop_protection: bool = proto.Field( + proto.BOOL, + number=11, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=12, + ) + + +class ListDatabasesRequest(proto.Message): + r"""The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Attributes: + parent (str): + Required. The instance whose databases should be listed. + Values are of the form + ``projects//instances/``. + page_size (int): + Number of databases to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListDatabasesResponse.next_page_token] + from a previous + [ListDatabasesResponse][google.spanner.admin.database.v1.ListDatabasesResponse]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDatabasesResponse(proto.Message): + r"""The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Attributes: + databases (MutableSequence[google.cloud.spanner_admin_database_v1.types.Database]): + Databases that matched the request. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases] + call to fetch more of the matching databases. + """ + + @property + def raw_page(self): + return self + + databases: MutableSequence['Database'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Database', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDatabaseRequest(proto.Message): + r"""The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + + Attributes: + parent (str): + Required. The name of the instance that will serve the new + database. Values are of the form + ``projects//instances/``. + create_statement (str): + Required. A ``CREATE DATABASE`` statement, which specifies + the ID of the new database. The database ID must conform to + the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be + between 2 and 30 characters in length. 
If the database ID is
+            a reserved word or if it contains a hyphen, the database ID
+            must be enclosed in backticks (:literal:`\``).
+        extra_statements (MutableSequence[str]):
+            Optional. A list of DDL statements to run
+            inside the newly created database. Statements
+            can create tables, indexes, etc. These
+            statements execute atomically with the creation
+            of the database: if there is an error in any
+            statement, the database is not created.
+        encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig):
+            Optional. The encryption configuration for
+            the database. If this field is not specified,
+            Cloud Spanner will encrypt/decrypt all data at
+            rest using Google default encryption.
+        database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect):
+            Optional. The dialect of the Cloud Spanner
+            Database.
+        proto_descriptors (bytes):
+            Optional. Proto descriptors used by CREATE/ALTER PROTO
+            BUNDLE statements in 'extra_statements' above. Contains a
+            protobuf-serialized
+            `google.protobuf.FileDescriptorSet `__.
+            To generate it,
+            `install `__ and
+            run ``protoc`` with ``--include_imports`` and
+            ``--descriptor_set_out``. For example, to generate for
+            moon/shot/app.proto, run
+
+            ::
+
+               $protoc --proto_path=/app_path --proto_path=/lib_path \
+                       --include_imports \
+                       --descriptor_set_out=descriptors.data \
+                       moon/shot/app.proto
+
+            For more details, see protobuffer `self
+            description `__.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    create_statement: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    extra_statements: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+    encryption_config: common.EncryptionConfig = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=common.EncryptionConfig,
+    )
+    database_dialect: common.DatabaseDialect = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=common.DatabaseDialect,
+    )
+    proto_descriptors: bytes = proto.Field(
+        proto.BYTES,
+        number=6,
+    )
+
+
+class CreateDatabaseMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase].
+
+    Attributes:
+        database (str):
+            The database being created.
+    """
+
+    database: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class GetDatabaseRequest(proto.Message):
+    r"""The request for
+    [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase].
+
+    Attributes:
+        name (str):
+            Required. The name of the requested database. Values are of
+            the form
+            ``projects//instances//databases/``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class UpdateDatabaseRequest(proto.Message):
+    r"""The request for
+    [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase].
+
+    Attributes:
+        database (google.cloud.spanner_admin_database_v1.types.Database):
+            Required. The database to update. The ``name`` field of the
+            database is of the form
+            ``projects//instances//databases/``.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. The list of fields to update. Currently, only the
+            ``enable_drop_protection`` field can be updated.
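+
+    A minimal construction sketch (illustrative only; the resource
+    name is a placeholder)::
+
+        from google.protobuf import field_mask_pb2
+
+        request = UpdateDatabaseRequest(
+            database=Database(
+                name="projects/my-project/instances/my-instance/databases/my-db",
+                enable_drop_protection=True,
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["enable_drop_protection"]),
+        )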
+ """ + + database: 'Database' = proto.Field( + proto.MESSAGE, + number=1, + message='Database', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class UpdateDatabaseMetadata(proto.Message): + r"""Metadata type for the operation returned by + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + + Attributes: + request (google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest): + The request for + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase] + operation. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is + best-effort). + """ + + request: 'UpdateDatabaseRequest' = proto.Field( + proto.MESSAGE, + number=1, + message='UpdateDatabaseRequest', + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=2, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateDatabaseDdlRequest(proto.Message): + r"""Enqueues the given DDL statements to be applied, in order but not + necessarily all at once, to the database schema at some point (or + points) in the future. The server checks that the statements are + executable (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon later execution + (e.g., if a statement from another batch of statements is applied + first and it conflicts in some way, or if there is some data-related + problem like a ``NULL`` value in a column to which ``NOT NULL`` + would be added). If a statement fails, all subsequent statements in + the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be used with + the [Operations][google.longrunning.Operations] API to monitor + progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + + Attributes: + database (str): + Required. The database to update. + statements (MutableSequence[str]): + Required. DDL statements to be applied to the + database. + operation_id (str): + If empty, the new update request is assigned an + automatically-generated operation ID. Otherwise, + ``operation_id`` is used to construct the name of the + resulting [Operation][google.longrunning.Operation]. + + Specifying an explicit operation ID simplifies determining + whether the statements were executed in the event that the + [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] + call is replayed, or the return value is otherwise lost: the + [database][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.database] + and ``operation_id`` fields can be combined to form the + [name][google.longrunning.Operation.name] of the resulting + [longrunning.Operation][google.longrunning.Operation]: + ``/operations/``. + + ``operation_id`` should be unique within the database, and + must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that + automatically-generated operation IDs always begin with an + underscore. 
If the named operation already exists, + [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] + returns ``ALREADY_EXISTS``. + proto_descriptors (bytes): + Optional. Proto descriptors used by CREATE/ALTER PROTO + BUNDLE statements. Contains a protobuf-serialized + `google.protobuf.FileDescriptorSet `__. + To generate it, + `install `__ and + run ``protoc`` with --include_imports and + --descriptor_set_out. For example, to generate for + moon/shot/app.proto, run + + :: + + $protoc --proto_path=/app_path --proto_path=/lib_path \ + --include_imports \ + --descriptor_set_out=descriptors.data \ + moon/shot/app.proto + + For more details, see protobuffer `self + description `__. + throughput_mode (bool): + Optional. This field is exposed to be used by the Spanner + Migration Tool. For more details, see + `SMT `__. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + statements: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + operation_id: str = proto.Field( + proto.STRING, + number=3, + ) + proto_descriptors: bytes = proto.Field( + proto.BYTES, + number=4, + ) + throughput_mode: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class DdlStatementActionInfo(proto.Message): + r"""Action information extracted from a DDL statement. This proto is + used to display the brief info of the DDL statement for the + operation + [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. + + Attributes: + action (str): + The action for the DDL statement, e.g. + CREATE, ALTER, DROP, GRANT, etc. This field is a + non-empty string. + entity_type (str): + The entity type for the DDL statement, e.g. TABLE, INDEX, + VIEW, etc. This field can be empty string for some DDL + statement, e.g. for statement "ANALYZE", ``entity_type`` = + "". + entity_names (MutableSequence[str]): + The entity name(s) being operated on the DDL statement. E.g. + + 1. For statement "CREATE TABLE t1(...)", ``entity_names`` = + ["t1"]. + 2. For statement "GRANT ROLE r1, r2 ...", ``entity_names`` = + ["r1", "r2"]. + 3. For statement "ANALYZE", ``entity_names`` = []. + """ + + action: str = proto.Field( + proto.STRING, + number=1, + ) + entity_type: str = proto.Field( + proto.STRING, + number=2, + ) + entity_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class UpdateDatabaseDdlMetadata(proto.Message): + r"""Metadata type for the operation returned by + [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. + + Attributes: + database (str): + The database being modified. + statements (MutableSequence[str]): + For an update this list contains all the + statements. For an individual statement, this + list contains only that statement. + commit_timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): + Reports the commit timestamps of all statements that have + succeeded so far, where ``commit_timestamps[i]`` is the + commit timestamp for the statement ``statements[i]``. + throttled (bool): + Output only. When true, indicates that the + operation is throttled e.g. due to resource + constraints. When resources become available the + operation will resume and this field will be + false again. + progress (MutableSequence[google.cloud.spanner_admin_database_v1.types.OperationProgress]): + The progress of the + [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] + operations. 
All DDL statements will have continuously + updating progress, and ``progress[i]`` is the operation + progress for ``statements[i]``. Also, ``progress[i]`` will + have start time and end time populated with commit timestamp + of operation, as well as a progress of 100% once the + operation has completed. + actions (MutableSequence[google.cloud.spanner_admin_database_v1.types.DdlStatementActionInfo]): + The brief action info for the DDL statements. ``actions[i]`` + is the brief info for ``statements[i]``. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + statements: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + commit_timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + throttled: bool = proto.Field( + proto.BOOL, + number=4, + ) + progress: MutableSequence[common.OperationProgress] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=common.OperationProgress, + ) + actions: MutableSequence['DdlStatementActionInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='DdlStatementActionInfo', + ) + + +class DropDatabaseRequest(proto.Message): + r"""The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + + Attributes: + database (str): + Required. The database to be dropped. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetDatabaseDdlRequest(proto.Message): + r"""The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + Attributes: + database (str): + Required. The database whose schema we wish to get. Values + are of the form + ``projects//instances//databases/`` + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetDatabaseDdlResponse(proto.Message): + r"""The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + Attributes: + statements (MutableSequence[str]): + A list of formatted DDL statements defining + the schema of the database specified in the + request. + proto_descriptors (bytes): + Proto descriptors stored in the database. Contains a + protobuf-serialized + `google.protobuf.FileDescriptorSet `__. + For more details, see protobuffer `self + description `__. + """ + + statements: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + proto_descriptors: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class ListDatabaseOperationsRequest(proto.Message): + r"""The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Attributes: + parent (str): + Required. The instance of the database operations. Values + are of the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [Operation][google.longrunning.Operation] are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. 
For example, + the type string for + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata] + is + ``type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first, if filtering + on metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic. However, you can specify AND, OR, + and NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND`` + ``(metadata.source_type:BACKUP) AND`` + ``(metadata.backup_info.backup:backup_howl) AND`` + ``(metadata.name:restored_howl) AND`` + ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Return operations where: + + - The operation's metadata type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + - The database is restored from a backup. + - The backup name contains "backup_howl". + - The restored database's name contains "restored_howl". + - The operation started before 2018-03-28T14:50:00Z. + - The operation resulted in an error. + page_size (int): + Number of operations to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListDatabaseOperationsResponse.next_page_token] + from a previous + [ListDatabaseOperationsResponse][google.spanner.admin.database.v1.ListDatabaseOperationsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDatabaseOperationsResponse(proto.Message): + r"""The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Attributes: + operations (MutableSequence[google.longrunning.operations_pb2.Operation]): + The list of matching database [long-running + operations][google.longrunning.Operation]. Each operation's + name will be prefixed by the database's name. The + operation's + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations] + call to fetch more of the matching metadata. + """ + + @property + def raw_page(self): + return self + + operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreDatabaseRequest(proto.Message): + r"""The request for + [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The name of the instance in which to create the + restored database. This instance must be in the same project + and have the same instance configuration as the instance + containing the source backup. Values are of the form + ``projects//instances/``. + database_id (str): + Required. The id of the database to create and restore to. + This database must not already exist. The ``database_id`` + appended to ``parent`` forms the full database name of the + form + ``projects//instances//databases/``. + backup (str): + Name of the backup from which to restore. Values are of the + form + ``projects//instances//backups/``. + + This field is a member of `oneof`_ ``source``. + encryption_config (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig): + Optional. An encryption configuration describing the + encryption type and key resources in Cloud KMS used to + encrypt/decrypt the database to restore to. If this field is + not specified, the restored database will use the same + encryption configuration as the backup by default, namely + [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] + = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup: str = proto.Field( + proto.STRING, + number=3, + oneof='source', + ) + encryption_config: 'RestoreDatabaseEncryptionConfig' = proto.Field( + proto.MESSAGE, + number=4, + message='RestoreDatabaseEncryptionConfig', + ) + + +class RestoreDatabaseEncryptionConfig(proto.Message): + r"""Encryption configuration for the restored database. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig.EncryptionType): + Required. The encryption type of the restored + database. + kms_key_name (str): + Optional. The Cloud KMS key that will be used to + encrypt/decrypt the restored database. This field should be + set only when + [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] + is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Optional. Specifies the KMS configuration for the one or + more keys used to encrypt the database. Values are of the + form + ``projects//locations//keyRings//cryptoKeys/``. + + The keys referenced by kms_key_names must fully cover all + regions of the database instance configuration. Some + examples: + + - For single region database instance configs, specify a + single regional location KMS key. + - For multi-regional database instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For a database instance config of type USER_MANAGED, + please specify only regional location KMS keys to cover + each region in the instance config. Multi-regional + location KMS keys are not supported for USER_MANAGED + instance configs. + """ + class EncryptionType(proto.Enum): + r"""Encryption types for the database to be restored. + + Values: + ENCRYPTION_TYPE_UNSPECIFIED (0): + Unspecified. Do not use. 
+            USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1):
+                This is the default option when
+                [encryption_config][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig]
+                is not specified.
+            GOOGLE_DEFAULT_ENCRYPTION (2):
+                Use Google default encryption.
+            CUSTOMER_MANAGED_ENCRYPTION (3):
+                Use customer managed encryption. If specified,
+                ``kms_key_name`` must contain a valid Cloud KMS key.
+        """
+        ENCRYPTION_TYPE_UNSPECIFIED = 0
+        USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1
+        GOOGLE_DEFAULT_ENCRYPTION = 2
+        CUSTOMER_MANAGED_ENCRYPTION = 3
+
+    encryption_type: EncryptionType = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=EncryptionType,
+    )
+    kms_key_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    kms_key_names: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class RestoreDatabaseMetadata(proto.Message):
+    r"""Metadata type for the long-running operation returned by
+    [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Name of the database being created and
+            restored to.
+        source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType):
+            The type of the restore source.
+        backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo):
+            Information about the backup used to restore
+            the database.
+
+            This field is a member of `oneof`_ ``source_info``.
+        progress (google.cloud.spanner_admin_database_v1.types.OperationProgress):
+            The progress of the
+            [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]
+            operation.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which cancellation of this operation was
+            received.
+            [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]
+            starts asynchronous cancellation on a long-running
+            operation. The server makes a best effort to cancel the
+            operation, but success is not guaranteed. Clients can use
+            [Operations.GetOperation][google.longrunning.Operations.GetOperation]
+            or other methods to check whether the cancellation succeeded
+            or whether the operation completed despite cancellation. On
+            successful cancellation, the operation is not deleted;
+            instead, it becomes an operation with an
+            [Operation.error][google.longrunning.Operation.error] value
+            with a [google.rpc.Status.code][google.rpc.Status.code] of
+            1, corresponding to ``Code.CANCELLED``.
+        optimize_database_operation_name (str):
+            If exists, the name of the long-running operation that will
+            be used to track the post-restore optimization process to
+            optimize the performance of the restored database, and
+            remove the dependency on the restore source. The name is of
+            the form
+            ``projects//instances//databases//operations/``
+            where the database segment is the name of the database being
+            created and restored to. The metadata type of the
+            long-running operation is
+            [OptimizeRestoredDatabaseMetadata][google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata].
+            This long-running operation is automatically created by the
+            system after the RestoreDatabase long-running operation
+            completes successfully, and is not created if the restore
+            was not successful.
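+
+    A caller-side sketch (illustrative only; assumes a
+    ``DatabaseAdminClient`` named ``client`` and placeholder resource
+    names)::
+
+        operation = client.restore_database(
+            parent="projects/my-project/instances/my-instance",
+            database_id="restored-db",
+            backup="projects/my-project/instances/my-instance/backups/my-backup",
+        )
+        # The operation's metadata deserializes to RestoreDatabaseMetadata.
+        metadata = operation.metadata
+        print(metadata.optimize_database_operation_name)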
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source_type: 'RestoreSourceType' = proto.Field( + proto.ENUM, + number=2, + enum='RestoreSourceType', + ) + backup_info: gsad_backup.BackupInfo = proto.Field( + proto.MESSAGE, + number=3, + oneof='source_info', + message=gsad_backup.BackupInfo, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=4, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + optimize_database_operation_name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class OptimizeRestoredDatabaseMetadata(proto.Message): + r"""Metadata type for the long-running operation used to track + the progress of optimizations performed on a newly restored + database. This long-running operation is automatically created + by the system after the successful completion of a database + restore, and cannot be cancelled. + + Attributes: + name (str): + Name of the restored database being + optimized. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the post-restore + optimizations. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=2, + message=common.OperationProgress, + ) + + +class DatabaseRole(proto.Message): + r"""A Cloud Spanner database role. + + Attributes: + name (str): + Required. The name of the database role. Values are of the + form + ``projects//instances//databases//databaseRoles/`` + where ```` is as specified in the ``CREATE ROLE`` DDL + statement. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDatabaseRolesRequest(proto.Message): + r"""The request for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + + Attributes: + parent (str): + Required. The database whose roles should be listed. Values + are of the form + ``projects//instances//databases/``. + page_size (int): + Number of database roles to be returned in + the response. If 0 or less, defaults to the + server's maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListDatabaseRolesResponse.next_page_token] + from a previous + [ListDatabaseRolesResponse][google.spanner.admin.database.v1.ListDatabaseRolesResponse]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDatabaseRolesResponse(proto.Message): + r"""The response for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + + Attributes: + database_roles (MutableSequence[google.cloud.spanner_admin_database_v1.types.DatabaseRole]): + Database roles that matched the request. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles] + call to fetch more of the matching roles. 
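+
+    Typically consumed through the client's paged iterator rather than
+    directly; an illustrative sketch (placeholder resource name,
+    assumes a ``DatabaseAdminClient`` named ``client``)::
+
+        for role in client.list_database_roles(
+            parent="projects/my-project/instances/my-instance/databases/my-db"
+        ):
+            print(role.name)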
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    database_roles: MutableSequence['DatabaseRole'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='DatabaseRole',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class AddSplitPointsRequest(proto.Message):
+    r"""The request for
+    [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints].
+
+    Attributes:
+        database (str):
+            Required. The database on whose tables/indexes split points
+            are to be added. Values are of the form
+            ``projects//instances//databases/``.
+        split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]):
+            Required. The split points to add.
+        initiator (str):
+            Optional. A user-supplied tag associated with the split
+            points. For example, "initial_data_load", "special_event_1".
+            Defaults to "CloudAddSplitPointsAPI" if not specified. The
+            length of the tag must not exceed 50 characters; otherwise,
+            it will be trimmed. Only valid UTF-8 characters are allowed.
+    """
+
+    database: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    split_points: MutableSequence['SplitPoints'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message='SplitPoints',
+    )
+    initiator: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class AddSplitPointsResponse(proto.Message):
+    r"""The response for
+    [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints].
+
+    """
+
+
+class SplitPoints(proto.Message):
+    r"""The split points of a table/index.
+
+    Attributes:
+        table (str):
+            The table to split.
+        index (str):
+            The index to split. If specified, the ``table`` field must
+            refer to the index's base table.
+        keys (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints.Key]):
+            Required. The list of split keys, i.e., the
+            split boundaries.
+        expire_time (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. The expiration timestamp of the
+            split points. A timestamp in the past means
+            immediate expiration. The maximum value can be
+            30 days in the future. Defaults to 10 days in
+            the future if not specified.
+    """
+
+    class Key(proto.Message):
+        r"""A split key.
+
+        Attributes:
+            key_parts (google.protobuf.struct_pb2.ListValue):
+                Required. The column values making up the
+                split key.
+        """
+
+        key_parts: struct_pb2.ListValue = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=struct_pb2.ListValue,
+        )
+
+    table: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    index: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    keys: MutableSequence[Key] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message=Key,
+    )
+    expire_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class InternalUpdateGraphOperationRequest(proto.Message):
+    r"""Internal request proto, do not use directly.
+
+    Attributes:
+        database (str):
+            Internal field, do not use directly.
+        operation_id (str):
+            Internal field, do not use directly.
+        vm_identity_token (str):
+            Internal field, do not use directly.
+        progress (float):
+            Internal field, do not use directly.
+        status (google.rpc.status_pb2.Status):
+            Internal field, do not use directly.
+ """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + operation_id: str = proto.Field( + proto.STRING, + number=2, + ) + vm_identity_token: str = proto.Field( + proto.STRING, + number=5, + ) + progress: float = proto.Field( + proto.DOUBLE, + number=3, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=6, + message=status_pb2.Status, + ) + + +class InternalUpdateGraphOperationResponse(proto.Message): + r"""Internal response proto, do not use directly. + """ + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/mypy.ini b/owl-bot-staging/spanner_admin_database/v1/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_database/v1/noxfile.py b/owl-bot-staging/spanner_admin_database/v1/noxfile.py new file mode 100644 index 0000000000..feed1a3ac8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/noxfile.py @@ -0,0 +1,601 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] + +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-spanner-admin-database" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + # 
TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
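+
+    Typically invoked as ``nox -s cover`` after one or more ``unit``
+    sessions have written coverage data (a usage note; exact session
+    names depend on the parametrization above).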
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=PREVIEW_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. + core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json new file mode 100644 index 0000000000..e89008727d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -0,0 +1,4480 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.spanner.admin.database.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-spanner-admin-database", + "version": "0.0.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.add_split_points", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "AddSplitPoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "split_points", + "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", + "shortName": "add_split_points" + }, + "description": "Sample for AddSplitPoints", + "file": "spanner_v1_generated_database_admin_add_split_points_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_add_split_points_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.add_split_points", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "AddSplitPoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "split_points", + "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", + "shortName": "add_split_points" + }, + "description": "Sample for AddSplitPoints", + "file": "spanner_v1_generated_database_admin_add_split_points_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_add_split_points_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.copy_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CopyBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": 
"source_backup", + "type": "str" + }, + { + "name": "expire_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "copy_backup" + }, + "description": "Sample for CopyBackup", + "file": "spanner_v1_generated_database_admin_copy_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_copy_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.copy_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CopyBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "source_backup", + "type": "str" + }, + { + "name": "expire_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "copy_backup" + }, + "description": "Sample for CopyBackup", + "file": "spanner_v1_generated_database_admin_copy_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_copy_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": 
"CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "backup_schedule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "spanner_v1_generated_database_admin_create_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "backup_schedule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "spanner_v1_generated_database_admin_create_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "spanner_v1_generated_database_admin_create_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "spanner_v1_generated_database_admin_create_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "create_statement", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "spanner_v1_generated_database_admin_create_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "create_statement", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "spanner_v1_generated_database_admin_create_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": 
{ + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "spanner_v1_generated_database_admin_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "spanner_v1_generated_database_admin_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.drop_database", + "method": { + "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DropDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "drop_database" + }, + "description": "Sample for DropDatabase", + "file": "spanner_v1_generated_database_admin_drop_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_drop_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.drop_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DropDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "drop_database" + }, + "description": "Sample for DropDatabase", + "file": "spanner_v1_generated_database_admin_drop_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_drop_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" + }, + "description": "Sample for GetBackupSchedule", + "file": "spanner_v1_generated_database_admin_get_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" + }, + "description": "Sample for GetBackupSchedule", + "file": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "spanner_v1_generated_database_admin_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "spanner_v1_generated_database_admin_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", + "shortName": "get_database_ddl" + }, + "description": "Sample for GetDatabaseDdl", + "file": "spanner_v1_generated_database_admin_get_database_ddl_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_ddl_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", + "shortName": "get_database_ddl" + }, + "description": "Sample for GetDatabaseDdl", + "file": "spanner_v1_generated_database_admin_get_database_ddl_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_ddl_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "spanner_v1_generated_database_admin_get_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "spanner_v1_generated_database_admin_get_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_database_admin_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_database_admin_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.internal_update_graph_operation", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "InternalUpdateGraphOperation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "operation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", + "shortName": "internal_update_graph_operation" + }, + "description": "Sample for InternalUpdateGraphOperation", + "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.internal_update_graph_operation", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "InternalUpdateGraphOperation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "operation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", + "shortName": "internal_update_graph_operation" + }, + "description": "Sample for InternalUpdateGraphOperation", + "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_operations", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupOperations" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager", + "shortName": "list_backup_operations" + }, + "description": "Sample for ListBackupOperations", + "file": "spanner_v1_generated_database_admin_list_backup_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_operations", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager", + "shortName": "list_backup_operations" + }, + "description": "Sample for ListBackupOperations", + "file": "spanner_v1_generated_database_admin_list_backup_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_schedules", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", + "service": { + "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupSchedules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager", + "shortName": "list_backup_schedules" + }, + "description": "Sample for ListBackupSchedules", + "file": "spanner_v1_generated_database_admin_list_backup_schedules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_schedules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_schedules", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupSchedules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager", + "shortName": "list_backup_schedules" + }, + "description": "Sample for ListBackupSchedules", + "file": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backups", + 
"method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "spanner_v1_generated_database_admin_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backups", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "spanner_v1_generated_database_admin_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_operations", + "method": { + "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager", + "shortName": "list_database_operations" + }, + "description": "Sample for ListDatabaseOperations", + "file": "spanner_v1_generated_database_admin_list_database_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_operations", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager", + "shortName": "list_database_operations" + }, + "description": "Sample for ListDatabaseOperations", + "file": "spanner_v1_generated_database_admin_list_database_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_roles", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseRoles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager", + "shortName": "list_database_roles" + }, + "description": "Sample for ListDatabaseRoles", + "file": "spanner_v1_generated_database_admin_list_database_roles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_roles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_roles", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseRoles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager", + "shortName": "list_database_roles" + }, + "description": "Sample for ListDatabaseRoles", + "file": "spanner_v1_generated_database_admin_list_database_roles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_roles_sync.py" + }, + { 
+ "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_databases", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "spanner_v1_generated_database_admin_list_databases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_databases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_databases", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "spanner_v1_generated_database_admin_list_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_databases_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.restore_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "RestoreDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "backup", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_database" + }, + "description": "Sample for RestoreDatabase", + "file": "spanner_v1_generated_database_admin_restore_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_restore_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.restore_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "RestoreDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "backup", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_database" + }, + "description": "Sample for RestoreDatabase", + "file": "spanner_v1_generated_database_admin_restore_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, 
+ "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_restore_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_database_admin_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_database_admin_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_set_iam_policy_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_database_admin_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "spanner_v1_generated_database_admin_update_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync", + 
"segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "spanner_v1_generated_database_admin_update_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": 
"spanner_v1_generated_database_admin_update_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "statements", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_database_ddl" + }, + "description": "Sample for UpdateDatabaseDdl", + "file": "spanner_v1_generated_database_admin_update_database_ddl_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_ddl_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "statements", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "update_database_ddl" + }, + "description": "Sample for UpdateDatabaseDdl", + "file": "spanner_v1_generated_database_admin_update_database_ddl_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_ddl_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.spanner_admin_database_v1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "spanner_v1_generated_database_admin_update_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.spanner_admin_database_v1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "spanner_v1_generated_database_admin_update_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_sync.py" + } + ] +} diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py new file mode 100644 index 0000000000..ff6fcfe598 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddSplitPoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_add_split_points(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.AddSplitPointsRequest( + database="database_value", + ) + + # Make the request + response = await client.add_split_points(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py new file mode 100644 index 0000000000..3819bbe986 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddSplitPoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_add_split_points(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.AddSplitPointsRequest( + database="database_value", + ) + + # Make the request + response = client.add_split_points(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py new file mode 100644 index 0000000000..d885947bb5 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_copy_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + # Make the request + operation = client.copy_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py new file mode 100644 index 0000000000..a571e058c9 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
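The asynchronous long-running-operation samples in this series (CopyBackup above; CreateBackup and CreateDatabase below) finish with response = (await operation).result(), reproduced here exactly as the generator emits it. In google-api-core, operation_async.AsyncOperation.result() is itself a coroutine, so code adapted from these snippets generally needs a second await; a minimal sketch under that assumption:

import asyncio

from google.cloud import spanner_admin_database_v1


async def copy_backup_and_wait() -> None:
    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
    request = spanner_admin_database_v1.CopyBackupRequest(
        parent="parent_value",
        backup_id="backup_id_value",
        source_backup="source_backup_value",
    )
    # The async client method is a coroutine resolving to an AsyncOperation.
    operation = await client.copy_backup(request=request)
    print("Waiting for operation to complete...")
    # AsyncOperation.result() is assumed to be a coroutine as well (see the
    # note above), so the final result is awaited too.
    response = await operation.result()
    print(response)


# asyncio.run(copy_backup_and_wait())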
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_copy_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + # Make the request + operation = client.copy_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py new file mode 100644 index 0000000000..2ad8881f54 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
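In the synchronous CopyBackup sample just above, operation.result() blocks the calling thread until the copy finishes. The method also accepts an optional timeout in seconds and should then raise a concurrent.futures.TimeoutError once the deadline passes; the budget below is illustrative:

# Bound the wait instead of blocking indefinitely; 300 s is an arbitrary choice.
response = operation.result(timeout=300)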
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_create_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py new file mode 100644 index 0000000000..efdcc2457e --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = await client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py new file mode 100644 index 0000000000..60d4b50c3b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
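The async samples in this series, such as sample_create_backup_schedule above, define coroutines but never start an event loop. Run standalone, they can be driven with asyncio.run:

import asyncio

# Execute the coroutine defined in the sample above on a fresh event loop.
asyncio.run(sample_create_backup_schedule())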
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py new file mode 100644 index 0000000000..02b9d1f0e7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_create_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py new file mode 100644 index 0000000000..47399a8d40 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_create_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateDatabaseRequest( + parent="parent_value", + create_statement="create_statement_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py new file mode 100644 index 0000000000..6f112cd8a7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
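In the CreateDatabase samples here, create_statement carries only the CREATE DATABASE DDL. The request's optional extra_statements field lets schema objects be created within the same long-running operation; a sketch with illustrative DDL and a hypothetical resource path:

request = spanner_admin_database_v1.CreateDatabaseRequest(
    # Hypothetical instance path.
    parent="projects/my-project/instances/my-instance",
    create_statement="CREATE DATABASE `my-database`",
    # Optional DDL applied to the new database as part of the same operation.
    extra_statements=[
        "CREATE TABLE Singers (SingerId INT64 NOT NULL) PRIMARY KEY (SingerId)",
    ],
)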
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_create_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateDatabaseRequest( + parent="parent_value", + create_statement="create_statement_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py new file mode 100644 index 0000000000..ab10785105 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_delete_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py new file mode 100644 index 0000000000..591d45cb10 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup_schedule(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py new file mode 100644 index 0000000000..720417ba65 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + client.delete_backup_schedule(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py new file mode 100644 index 0000000000..736dc56a23 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_delete_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + client.delete_backup(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py new file mode 100644 index 0000000000..15f279b72d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DropDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + await client.drop_database(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py new file mode 100644 index 0000000000..f218cabd83 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DropDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + client.drop_database(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py new file mode 100644 index 0000000000..58b93a119a --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py new file mode 100644 index 0000000000..5a37eec975 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py new file mode 100644 index 0000000000..4006cac333 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py new file mode 100644 index 0000000000..16cffcd78d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py new file mode 100644 index 0000000000..fd8621c27b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py new file mode 100644 index 0000000000..8e84b21f78 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabaseDdl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = await client.get_database_ddl(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py new file mode 100644 index 0000000000..495b557a55 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabaseDdl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = client.get_database_ddl(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py new file mode 100644 index 0000000000..ab729bb9e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py new file mode 100644 index 0000000000..d5d75de78b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py new file mode 100644 index 0000000000..75e0b48b1b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py new file mode 100644 index 0000000000..556205a0aa --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InternalUpdateGraphOperation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = await client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py new file mode 100644 index 0000000000..46f1a3c88f --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InternalUpdateGraphOperation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py new file mode 100644 index 0000000000..a56ec9f80e --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_backup_operations():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListBackupOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_backup_operations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py
new file mode 100644
index 0000000000..6383e1b247
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackupOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_backup_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py new file mode 100644 index 0000000000..25ac53891a --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupSchedules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_backup_schedules():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListBackupSchedulesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_backup_schedules(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py
new file mode 100644
index 0000000000..89cf82d278
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackupSchedules
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_backup_schedules(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_schedules(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py new file mode 100644 index 0000000000..140e519e07 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_backups():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListBackupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_backups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_ListBackups_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py
new file mode 100644
index 0000000000..9f04036f74
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListBackups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_backups(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackups_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py new file mode 100644 index 0000000000..3bc614b232 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabaseOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_database_operations():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListDatabaseOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_database_operations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py
new file mode 100644
index 0000000000..3d4dc965a9
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatabaseOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_database_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py new file mode 100644 index 0000000000..46ec91ce89 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabaseRoles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_database_roles():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListDatabaseRolesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_database_roles(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py
new file mode 100644
index 0000000000..d39e4759dd
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatabaseRoles
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_database_roles(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseRolesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_roles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py new file mode 100644 index 0000000000..586dfa56f1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_databases():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListDatabasesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_databases(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py
new file mode 100644
index 0000000000..e6ef221af6
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListDatabases
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_databases(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py new file mode 100644 index 0000000000..384c063c61 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_restore_database():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.RestoreDatabaseRequest(
+        backup="backup_value",
+        parent="parent_value",
+        database_id="database_id_value",
+    )
+
+    # Make the request
+    operation = client.restore_database(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py
new file mode 100644
index 0000000000..a327a8ae13
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for RestoreDatabase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_restore_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.RestoreDatabaseRequest( + backup="backup_value", + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.restore_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py new file mode 100644 index 0000000000..edade4c950 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py new file mode 100644 index 0000000000..28a6746f4a --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py new file mode 100644 index 0000000000..0e6ea91cb3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py new file mode 100644 index 0000000000..3fd0316dc1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py new file mode 100644 index 0000000000..95fa2a63f6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = await client.update_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py new file mode 100644 index 0000000000..de17dfc86e --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py new file mode 100644 index 0000000000..4ef64a0673 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py new file mode 100644 index 0000000000..9dbb0148dc --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = client.update_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py new file mode 100644 index 0000000000..d5588c3036 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_update_database():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    database = spanner_admin_database_v1.Database()
+    database.name = "name_value"
+
+    request = spanner_admin_database_v1.UpdateDatabaseRequest(
+        database=database,
+    )
+
+    # Make the request
+    operation = client.update_database(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py
new file mode 100644
index 0000000000..ad98e2da9c
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDatabaseDdl
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_update_database_ddl():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.UpdateDatabaseDdlRequest(
+        database="database_value",
+        statements=['statements_value1', 'statements_value2'],
+    )
+
+    # Make the request
+    operation = client.update_database_ddl(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py
new file mode 100644
index 0000000000..73297524b9
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateDatabaseDdl
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database="database_value", + statements=['statements_value1', 'statements_value2'], + ) + + # Make the request + operation = client.update_database_ddl(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py new file mode 100644 index 0000000000..62ed40bc84 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + database = spanner_admin_database_v1.Database() + database.name = "name_value" + + request = spanner_admin_database_v1.UpdateDatabaseRequest( + database=database, + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py new file mode 100644 index 0000000000..d642e9a0e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py @@ -0,0 +1,202 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class spanner_admin_databaseCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'add_split_points': ('database', 'split_points', 'initiator', ), + 'copy_backup': ('parent', 'backup_id', 'source_backup', 'expire_time', 'encryption_config', ), + 'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), + 'create_backup_schedule': ('parent', 'backup_schedule_id', 'backup_schedule', ), + 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', 'database_dialect', 'proto_descriptors', ), + 'delete_backup': ('name', ), + 'delete_backup_schedule': ('name', ), + 'drop_database': ('database', ), + 'get_backup': ('name', ), + 'get_backup_schedule': ('name', ), + 'get_database': ('name', ), + 'get_database_ddl': ('database', ), + 'get_iam_policy': ('resource', 'options', ), + 'internal_update_graph_operation': ('database', 'operation_id', 'vm_identity_token', 'progress', 'status', ), + 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_backup_schedules': ('parent', 'page_size', 'page_token', ), + 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_database_roles': ('parent', 'page_size', 'page_token', ), + 'list_databases': ('parent', 'page_size', 'page_token', ), + 'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_backup': ('backup', 'update_mask', ), + 'update_backup_schedule': ('backup_schedule', 'update_mask', ), + 'update_database': ('database', 'update_mask', ), + 'update_database_ddl': ('database', 'statements', 'operation_id', 'proto_descriptors', 'throughput_mode', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
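+            # A request= keyword means a previous run of this tool already
+            # converted this call, so skipping it keeps the transform idempotent.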
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )


+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=spanner_admin_databaseCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)


+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the spanner_admin_database client library.

+The existing sources are NOT overwritten but are copied to output_dir with changes made.

+Note: This tool operates on a best-effort basis when converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include:
+      A) * or ** expansion in a method call.
+      B) Calls via a function or method alias (including free function calls).
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically).

+      These all constitute false negatives. The tool can also produce false
+      positives when an API method shares a name with an unrelated method.
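
+      For example, a positional call such as (resource name is illustrative)

+          client.get_backup("projects/p/instances/i/backups/b")

+      is rewritten, per METHOD_TO_PARAMS above, to

+          client.get_backup(request={'name': "projects/p/instances/i/backups/b"})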
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_database/v1/setup.py b/owl-bot-staging/spanner_admin_database/v1/setup.py new file mode 100644 index 0000000000..64017e80e7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/setup.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-spanner-admin-database' + + +description = "Google Cloud Spanner Admin Database API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/spanner_admin_database/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-database" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") 
+] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt new file mode 100644 index 0000000000..2ae5a677e8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt new file mode 100644 index 0000000000..2ae5a677e8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000000..5b1ee6c35a --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt @@ -0,0 +1,13 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files +proto-plus==1.22.3 +protobuf==3.20.2 +grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py new file mode 100644 index 0000000000..0ba71c42e0 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py @@ -0,0 +1,22226 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import re +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminAsyncClient +from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminClient +from google.cloud.spanner_admin_database_v1.services.database_admin import pagers +from google.cloud.spanner_admin_database_v1.services.database_admin import transports +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import common +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from 
google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import any_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import struct_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+from google.type import expr_pb2  # type: ignore
+import google.auth
+
+
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+    # Step by chunk_size so the yielded chunks are consecutive rather than overlapping.
+    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This helper modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This helper modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
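+# For example (illustrative): with the template "test.{UNIVERSE_DOMAIN}", a
+# client whose universe domain is "example.com" resolves to "test.example.com".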
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DatabaseAdminClient._get_default_mtls_endpoint(None) is None + assert DatabaseAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DatabaseAdminClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DatabaseAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DatabaseAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DatabaseAdminClient._get_client_cert_source(None, False) is None + assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DatabaseAdminClient._get_client_cert_source(None, True) is mock_default_cert_source + assert 
DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DatabaseAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + assert DatabaseAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DatabaseAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DatabaseAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DatabaseAdminClient._get_universe_domain(None, None) == DatabaseAdminClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DatabaseAdminClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
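+
+# Illustrative summary of the precedence exercised above (not part of the
+# generated API surface): client option > GOOGLE_CLOUD_UNIVERSE_DOMAIN env
+# var > the "googleapis.com" default, e.g.
+#     DatabaseAdminClient._get_universe_domain("foo.com", "bar.com")  # -> "foo.com"
+#     DatabaseAdminClient._get_universe_domain(None, "bar.com")       # -> "bar.com"
+#     DatabaseAdminClient._get_universe_domain(None, None)            # -> "googleapis.com"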
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatabaseAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatabaseAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (DatabaseAdminClient, "grpc"), + (DatabaseAdminAsyncClient, "grpc_asyncio"), + (DatabaseAdminClient, "rest"), +]) +def test_database_admin_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DatabaseAdminGrpcTransport, "grpc"), + (transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DatabaseAdminRestTransport, "rest"), +]) +def test_database_admin_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DatabaseAdminClient, "grpc"), + (DatabaseAdminAsyncClient, "grpc_asyncio"), + (DatabaseAdminClient, "rest"), +]) +def test_database_admin_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +def test_database_admin_client_get_transport_class(): + transport = DatabaseAdminClient.get_transport_class() + available_transports = [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminRestTransport, + ] + assert transport in available_transports + + transport = DatabaseAdminClient.get_transport_class("grpc") + assert transport == transports.DatabaseAdminGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"), +]) +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) +def test_database_admin_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
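+    # (With "always" the client targets the default mTLS endpoint, e.g.
+    # spanner.mtls.googleapis.com, even when no client certificate is set.)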
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "true"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "false"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "true"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "false"), +]) +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) 
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_database_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
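+    # (With neither an explicit cert source nor an ADC default cert, the
+    # client falls back to plain TLS against the regular endpoint.)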
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DatabaseAdminClient, DatabaseAdminAsyncClient +]) +@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient)) +def test_database_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DatabaseAdminClient, DatabaseAdminAsyncClient +]) +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) +def test_database_admin_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"), +]) +def test_database_admin_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", None), +]) +def test_database_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
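+    # (The file path is forwarded to the transport, which is expected to load
+    # credentials from it, e.g. via google.auth.load_credentials_from_file;
+    # see the create-channel test below.)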
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_database_admin_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DatabaseAdminClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_database_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
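+    # (i.e. create_channel below must receive the credentials loaded from the
+    # file, not the ADC credentials returned by google.auth.default.)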
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + scopes=None, + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabasesRequest, + dict, +]) +def test_list_databases(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_databases_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.ListDatabasesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_databases(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabasesRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_databases_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_databases in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + request = {} + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_databases_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_databases in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_databases] = mock_rpc + + request = {} + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabasesRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. 
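+    # (len() of a non-empty mock_calls list is truthy, so this asserts that at
+    # least one call was recorded on the mocked stub.)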
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + +def test_list_databases_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabasesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabasesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse()) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_databases_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_databases_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_databases(
+ spanner_database_admin.ListDatabasesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_databases_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_databases),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_databases(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_databases_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_databases(
+ spanner_database_admin.ListDatabasesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_databases_pager(transport_name: str = "grpc"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_databases),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[
+ spanner_database_admin.Database(),
+ spanner_database_admin.Database(),
+ spanner_database_admin.Database(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[],
+ next_page_token='def',
+ ),
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[
+ spanner_database_admin.Database(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[
+ spanner_database_admin.Database(),
+ spanner_database_admin.Database(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ # Build the routing header metadata the pager is expected to attach.
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_databases(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, spanner_database_admin.Database)
+ for i in results)
+
+def test_list_databases_pages(transport_name: str = "grpc"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_databases),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + pages = list(client.list_databases(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_databases_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_databases(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_database_admin.Database) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_databases_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_databases(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.CreateDatabaseRequest, + dict, +]) +def test_create_database(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.CreateDatabaseRequest( + parent='parent_value', + create_statement='create_statement_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
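+ # Send the constructed request through the mocked stub; the assertions below confirm it was forwarded unchanged.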
+ client.create_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.CreateDatabaseRequest( + parent='parent_value', + create_statement='create_statement_value', + ) + +def test_create_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_database in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_database] = mock_rpc + + request = {} + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.CreateDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + +def test_create_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.CreateDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.CreateDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent='parent_value', + create_statement='create_statement_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].create_statement
+ mock_val = 'create_statement_value'
+ assert arg == mock_val
+
+
+def test_create_database_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_database(
+ spanner_database_admin.CreateDatabaseRequest(),
+ parent='parent_value',
+ create_statement='create_statement_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_database_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_database(
+ parent='parent_value',
+ create_statement='create_statement_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].create_statement
+ mock_val = 'create_statement_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_database_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_database(
+ spanner_database_admin.CreateDatabaseRequest(),
+ parent='parent_value',
+ create_statement='create_statement_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.GetDatabaseRequest,
+ dict,
+])
+def test_get_database(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner_database_admin.Database(
+ name='name_value',
+ state=spanner_database_admin.Database.State.CREATING,
+ version_retention_period='version_retention_period_value',
+ default_leader='default_leader_value',
+ database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+ enable_drop_protection=True,
+ reconciling=True,
+ )
+ response = client.get_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.GetDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.Database) + assert response.name == 'name_value' + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == 'version_retention_period_value' + assert response.default_leader == 'default_leader_value' + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.enable_drop_protection is True + assert response.reconciling is True + + +def test_get_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.GetDatabaseRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseRequest( + name='name_value', + ) + +def test_get_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_database] = mock_rpc + request = {} + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_database(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_database in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_database] = mock_rpc
+
+ request = {}
+ await client.get_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_database(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseRequest):
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database(
+ name='name_value',
+ state=spanner_database_admin.Database.State.CREATING,
+ version_retention_period='version_retention_period_value',
+ default_leader='default_leader_value',
+ database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+ enable_drop_protection=True,
+ reconciling=True,
+ ))
+ response = await client.get_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner_database_admin.GetDatabaseRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner_database_admin.Database) + assert response.name == 'name_value' + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == 'version_retention_period_value' + assert response.default_leader == 'default_leader_value' + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.enable_drop_protection is True + assert response.reconciling is True + + +@pytest.mark.asyncio +async def test_get_database_async_from_dict(): + await test_get_database_async(request_type=dict) + +def test_get_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.GetDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = spanner_database_admin.Database() + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.GetDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) + await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.get_database(
+ spanner_database_admin.GetDatabaseRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_database_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_database(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_database_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_database(
+ spanner_database_admin.GetDatabaseRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.UpdateDatabaseRequest,
+ dict,
+])
+def test_update_database(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_database_admin.UpdateDatabaseRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_update_database_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner_database_admin.UpdateDatabaseRequest(
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
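+ # Send the constructed request through the mocked stub; the assertions below confirm it was forwarded unchanged.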
+ client.update_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.UpdateDatabaseRequest( + ) + +def test_update_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + request = {} + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_database in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_database] = mock_rpc + + request = {} + await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.UpdateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_database_async_from_dict(): + await test_update_database_async(request_type=dict) + +def test_update_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseRequest() + + request.database.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseRequest() + + request.database.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database.name=name_value', + ) in kw['metadata'] + + +def test_update_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_database( + database=spanner_database_admin.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = spanner_database_admin.Database(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_database_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_database(
+ spanner_database_admin.UpdateDatabaseRequest(),
+ database=spanner_database_admin.Database(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_database_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_database(
+ database=spanner_database_admin.Database(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = spanner_database_admin.Database(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_database_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_database(
+ spanner_database_admin.UpdateDatabaseRequest(),
+ database=spanner_database_admin.Database(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.UpdateDatabaseDdlRequest,
+ dict,
+])
+def test_update_database_ddl(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database_ddl),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_database_ddl(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.UpdateDatabaseDdlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_database_ddl_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.UpdateDatabaseDdlRequest( + database='database_value', + operation_id='operation_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_database_ddl(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest( + database='database_value', + operation_id='operation_id_value', + ) + +def test_update_database_ddl_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database_ddl in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc + request = {} + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_database_ddl in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_database_ddl] = mock_rpc + + request = {} + await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.UpdateDatabaseDdlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_database_ddl_async_from_dict(): + await test_update_database_ddl_async(request_type=dict) + +def test_update_database_ddl_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseDdlRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_database_ddl_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseDdlRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_update_database_ddl_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_database_ddl( + database='database_value', + statements=['statements_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].statements + mock_val = ['statements_value'] + assert arg == mock_val + + +def test_update_database_ddl_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database_ddl( + spanner_database_admin.UpdateDatabaseDdlRequest(), + database='database_value', + statements=['statements_value'], + ) + +@pytest.mark.asyncio +async def test_update_database_ddl_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_database_ddl(
+ database='database_value',
+ statements=['statements_value'],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = 'database_value'
+ assert arg == mock_val
+ arg = args[0].statements
+ mock_val = ['statements_value']
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_database_ddl_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_database_ddl(
+ spanner_database_admin.UpdateDatabaseDdlRequest(),
+ database='database_value',
+ statements=['statements_value'],
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.DropDatabaseRequest,
+ dict,
+])
+def test_drop_database(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.drop_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.drop_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_database_admin.DropDatabaseRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_drop_database_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner_database_admin.DropDatabaseRequest(
+ database='database_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.drop_database),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
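+ # Send the constructed request through the mocked stub; the assertions below confirm it was forwarded unchanged.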
+ client.drop_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.DropDatabaseRequest( + database='database_value', + ) + +def test_drop_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.drop_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc + request = {} + client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.drop_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_drop_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.drop_database in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.drop_database] = mock_rpc + + request = {} + await client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.drop_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_drop_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.DropDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. 
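+        # Each entry in ``call.mock_calls`` is a mock.call triple that
+        # unpacks as (name, positional_args, keyword_args); the asserts
+        # below read the request proto out of the first positional slot,
+        # e.g. ``_, args, kwargs = call.mock_calls[0]``.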
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.DropDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_drop_database_async_from_dict(): + await test_drop_database_async(request_type=dict) + +def test_drop_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.DropDatabaseRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + call.return_value = None + client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_drop_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.DropDatabaseRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_drop_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.drop_database( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + + +def test_drop_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
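+    # The generated surface enforces mutual exclusion: a call may use either
+    # a fully formed request object or the flattened keyword fields, never
+    # both, since merging the two would be ambiguous. A sketch of the
+    # rejected shape: client.drop_database(request_obj, database='...').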
+    with pytest.raises(ValueError):
+        client.drop_database(
+            spanner_database_admin.DropDatabaseRequest(),
+            database='database_value',
+        )
+
+@pytest.mark.asyncio
+async def test_drop_database_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.drop_database),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.drop_database(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_drop_database_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.drop_database(
+            spanner_database_admin.DropDatabaseRequest(),
+            database='database_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  spanner_database_admin.GetDatabaseDdlRequest,
+  dict,
+])
+def test_get_database_ddl(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.GetDatabaseDdlResponse(
+            statements=['statements_value'],
+            proto_descriptors=b'proto_descriptors_blob',
+        )
+        response = client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.GetDatabaseDdlRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse)
+    assert response.statements == ['statements_value']
+    assert response.proto_descriptors == b'proto_descriptors_blob'
+
+
+def test_get_database_ddl_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = spanner_database_admin.GetDatabaseDdlRequest(
+        database='database_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
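+    # Patching ``__call__`` on type(...) rather than on the instance is what
+    # intercepts the invocation: Python looks special methods up on the
+    # type, so an instance attribute named ``__call__`` would be ignored.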
+ with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_database_ddl(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseDdlRequest( + database='database_value', + ) + +def test_get_database_ddl_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database_ddl in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc + request = {} + client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_database_ddl in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_database_ddl] = mock_rpc + + request = {} + await client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseDdlRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse(
+            statements=['statements_value'],
+            proto_descriptors=b'proto_descriptors_blob',
+        ))
+        response = await client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.GetDatabaseDdlRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse)
+    assert response.statements == ['statements_value']
+    assert response.proto_descriptors == b'proto_descriptors_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_async_from_dict():
+    await test_get_database_ddl_async(request_type=dict)
+
+def test_get_database_ddl_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.GetDatabaseDdlRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        call.return_value = spanner_database_admin.GetDatabaseDdlResponse()
+        client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.GetDatabaseDdlRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse())
+        await client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+def test_get_database_ddl_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.GetDatabaseDdlResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_database_ddl(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
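+        # The flattened keywords are copied into a freshly constructed
+        # GetDatabaseDdlRequest before the transport is invoked, which is
+        # why the assertions below read fields off ``args[0]`` (the request
+        # proto) rather than off the keyword arguments.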
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+
+def test_get_database_ddl_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_database_ddl(
+            spanner_database_admin.GetDatabaseDdlRequest(),
+            database='database_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_database_ddl(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_database_ddl(
+            spanner_database_admin.GetDatabaseDdlRequest(),
+            database='database_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  iam_policy_pb2.SetIamPolicyRequest,
+  dict,
+])
+def test_set_iam_policy(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+        response = client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.SetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+def test_set_iam_policy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
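+    # Under AIP-4235, request fields annotated for auto-population receive a
+    # UUID4 from the client when left unset; populating only the other
+    # string fields here lets the equality check below confirm that nothing
+    # unexpected was injected for this request type.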
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = iam_policy_pb2.SetIamPolicyRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.set_iam_policy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
+            resource='resource_value',
+        )
+
+def test_set_iam_policy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.set_iam_policy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
+        request = {}
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
+
+        request = {}
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.SetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_from_dict():
+    await test_set_iam_policy_async(request_type=dict)
+
+def test_set_iam_policy_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        call.return_value = policy_pb2.Policy()
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
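+    # The routing header is sent as gRPC metadata so the backend can route
+    # the RPC without parsing the request body; it arrives as a tuple of
+    # the form ('x-goog-request-params', 'resource=resource_value').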
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_set_iam_policy_from_dict_foreign():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.set_iam_policy(request={
+            'resource': 'resource_value',
+            'policy': policy_pb2.Policy(version=774),
+            'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']),
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_set_iam_policy_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  iam_policy_pb2.GetIamPolicyRequest,
+  dict,
+])
+def test_get_iam_policy(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+        response = client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+def test_get_iam_policy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = iam_policy_pb2.GetIamPolicyRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.get_iam_policy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.GetIamPolicyRequest(
+            resource='resource_value',
+        )
+
+def test_get_iam_policy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_iam_policy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc
+        request = {}
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
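+        # _wrapped_methods maps each transport method to the retry/timeout
+        # wrapper built once by _prep_wrapped_messages at construction time;
+        # swapping the dict entry above is enough to observe every
+        # subsequent invocation without re-wrapping.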
+        assert mock_rpc.call_count == 1
+
+        client.get_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
+
+        request = {}
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+    await test_get_iam_policy_async(request_type=dict)
+
+def test_get_iam_policy_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = policy_pb2.Policy()
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_get_iam_policy_from_dict_foreign():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.get_iam_policy(request={
+            'resource': 'resource_value',
+            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_get_iam_policy_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  iam_policy_pb2.TestIamPermissionsRequest,
+  dict,
+])
+def test_test_iam_permissions(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        )
+        response = client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+def test_test_iam_permissions_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = iam_policy_pb2.TestIamPermissionsRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+ client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc + + request = {} + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        ))
+        response = await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+    await test_test_iam_permissions_async(request_type=dict)
+
+def test_test_iam_permissions_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
+        await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_test_iam_permissions_from_dict_foreign():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        response = client.test_iam_permissions(request={
+            'resource': 'resource_value',
+            'permissions': ['permissions_value'],
+            }
+        )
+        call.assert_called()
+
+
+def test_test_iam_permissions_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.test_iam_permissions(
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+        arg = args[0].permissions
+        mock_val = ['permissions_value']
+        assert arg == mock_val
+
+
+def test_test_iam_permissions_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.test_iam_permissions(
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+        arg = args[0].permissions
+        mock_val = ['permissions_value']
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  gsad_backup.CreateBackupRequest,
+  dict,
+])
+def test_create_backup(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
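+        # CreateBackup is a long-running operation: the stub returns a raw
+        # operations_pb2.Operation, which the GAPIC layer wraps in an
+        # api_core operation future; hence the isinstance(response,
+        # future.Future) check at the end of this test.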
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = gsad_backup.CreateBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_backup_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = gsad_backup.CreateBackupRequest(
+        parent='parent_value',
+        backup_id='backup_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client.create_backup(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == gsad_backup.CreateBackupRequest(
+            parent='parent_value',
+            backup_id='backup_id_value',
+        )
+
+def test_create_backup_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_backup in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc
+        request = {}
+        client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
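+        # The reset below is needed because building the operations client
+        # itself goes through wrap_method, so wrapper_fn picks up extra
+        # calls on the first LRO invocation that are unrelated to
+        # create_backup being re-wrapped.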
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_backup in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_backup] = mock_rpc + + request = {} + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.CreateBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsad_backup.CreateBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_backup_async_from_dict(): + await test_create_backup_async(request_type=dict) + +def test_create_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.CreateBackupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.CreateBackupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup( + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].backup + mock_val = gsad_backup.Backup(database='database_value') + assert arg == mock_val + arg = args[0].backup_id + mock_val = 'backup_id_value' + assert arg == mock_val + + +def test_create_backup_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup( + gsad_backup.CreateBackupRequest(), + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + +@pytest.mark.asyncio +async def test_create_backup_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. 
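+ # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response so it
+ # can be awaited like a genuine async unary-unary call.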
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_backup(
+ parent='parent_value',
+ backup=gsad_backup.Backup(database='database_value'),
+ backup_id='backup_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].backup
+ mock_val = gsad_backup.Backup(database='database_value')
+ assert arg == mock_val
+ arg = args[0].backup_id
+ mock_val = 'backup_id_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_backup(
+ gsad_backup.CreateBackupRequest(),
+ parent='parent_value',
+ backup=gsad_backup.Backup(database='database_value'),
+ backup_id='backup_id_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ backup.CopyBackupRequest,
+ dict,
+])
+def test_copy_backup(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.copy_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.copy_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = backup.CopyBackupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_copy_backup_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = backup.CopyBackupRequest(
+ parent='parent_value',
+ backup_id='backup_id_value',
+ source_backup='source_backup_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.copy_backup),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
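+ # (As the comment above notes, a real string here keeps any code path
+ # that reads the operation name off the mocked response happy.)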
+ client.copy_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.CopyBackupRequest( + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + ) + +def test_copy_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc + request = {} + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_copy_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.copy_backup in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.copy_backup] = mock_rpc + + request = {} + await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_copy_backup_async(transport: str = 'grpc_asyncio', request_type=backup.CopyBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup.CopyBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_copy_backup_async_from_dict(): + await test_copy_backup_async(request_type=dict) + +def test_copy_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.CopyBackupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_copy_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.CopyBackupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_copy_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.copy_backup( + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
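+ # Each flattened kwarg should now appear as the matching field on the
+ # single CopyBackupRequest handed to the transport.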
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].backup_id
+ mock_val = 'backup_id_value'
+ assert arg == mock_val
+ arg = args[0].source_backup
+ mock_val = 'source_backup_value'
+ assert arg == mock_val
+ assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
+
+
+def test_copy_backup_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.copy_backup(
+ backup.CopyBackupRequest(),
+ parent='parent_value',
+ backup_id='backup_id_value',
+ source_backup='source_backup_value',
+ expire_time=timestamp_pb2.Timestamp(seconds=751),
+ )
+
+@pytest.mark.asyncio
+async def test_copy_backup_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.copy_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.copy_backup(
+ parent='parent_value',
+ backup_id='backup_id_value',
+ source_backup='source_backup_value',
+ expire_time=timestamp_pb2.Timestamp(seconds=751),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].backup_id
+ mock_val = 'backup_id_value'
+ assert arg == mock_val
+ arg = args[0].source_backup
+ mock_val = 'source_backup_value'
+ assert arg == mock_val
+ assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
+
+@pytest.mark.asyncio
+async def test_copy_backup_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.copy_backup(
+ backup.CopyBackupRequest(),
+ parent='parent_value',
+ backup_id='backup_id_value',
+ source_backup='source_backup_value',
+ expire_time=timestamp_pb2.Timestamp(seconds=751),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ backup.GetBackupRequest,
+ dict,
+])
+def test_get_backup(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
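+ # Populate every scalar field so the assertions below can verify that
+ # the proto-plus wrapper surfaces each one unchanged.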
+ call.return_value = backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.GetBackupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest( + name='name_value', + ) + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
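+ # The second call below must hit the same cached wrapper rather than
+ # re-wrapping the method.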
+ assert mock_rpc.call_count == 1
+
+ client.get_backup(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_backup in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_backup] = mock_rpc
+
+ request = {}
+ await client.get_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_backup(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=backup.GetBackupRequest):
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup(
+ database='database_value',
+ name='name_value',
+ size_bytes=1089,
+ freeable_size_bytes=2006,
+ exclusive_size_bytes=2168,
+ state=backup.Backup.State.CREATING,
+ referencing_databases=['referencing_databases_value'],
+ database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+ referencing_backups=['referencing_backups_value'],
+ backup_schedules=['backup_schedules_value'],
+ incremental_backup_chain_id='incremental_backup_chain_id_value',
+ ))
+ response = await client.get_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = backup.GetBackupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + +def test_get_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = backup.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_backup_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_backup(
+ backup.GetBackupRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_backup(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_backup(
+ backup.GetBackupRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ gsad_backup.UpdateBackupRequest,
+ dict,
+])
+def test_update_backup(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = gsad_backup.Backup(
+ database='database_value',
+ name='name_value',
+ size_bytes=1089,
+ freeable_size_bytes=2006,
+ exclusive_size_bytes=2168,
+ state=gsad_backup.Backup.State.CREATING,
+ referencing_databases=['referencing_databases_value'],
+ database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+ referencing_backups=['referencing_backups_value'],
+ backup_schedules=['backup_schedules_value'],
+ incremental_backup_chain_id='incremental_backup_chain_id_value',
+ )
+ response = client.update_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = gsad_backup.UpdateBackupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +def test_update_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup.UpdateBackupRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.UpdateBackupRequest( + ) + +def test_update_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.update_backup(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.update_backup in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.update_backup] = mock_rpc
+
+ request = {}
+ await client.update_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_backup(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.UpdateBackupRequest):
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup(
+ database='database_value',
+ name='name_value',
+ size_bytes=1089,
+ freeable_size_bytes=2006,
+ exclusive_size_bytes=2168,
+ state=gsad_backup.Backup.State.CREATING,
+ referencing_databases=['referencing_databases_value'],
+ database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+ referencing_backups=['referencing_backups_value'],
+ backup_schedules=['backup_schedules_value'],
+ incremental_backup_chain_id='incremental_backup_chain_id_value',
+ ))
+ response = await client.update_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = gsad_backup.UpdateBackupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + +def test_update_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.UpdateBackupRequest() + + request.backup.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = gsad_backup.Backup() + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.UpdateBackupRequest() + + request.backup.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup.name=name_value', + ) in kw['metadata'] + + +def test_update_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].backup
+ mock_val = gsad_backup.Backup(database='database_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_backup_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_backup(
+ gsad_backup.UpdateBackupRequest(),
+ backup=gsad_backup.Backup(database='database_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_backup(
+ backup=gsad_backup.Backup(database='database_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].backup
+ mock_val = gsad_backup.Backup(database='database_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_backup(
+ gsad_backup.UpdateBackupRequest(),
+ backup=gsad_backup.Backup(database='database_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ backup.DeleteBackupRequest,
+ dict,
+])
+def test_delete_backup(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_backup(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = backup.DeleteBackupRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.DeleteBackupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest( + name='name_value', + ) + +def test_delete_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_backup in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_backup] = mock_rpc + + request = {} + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
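+ # AsyncMock still tracks call_count when invoked, so the same counting
+ # assertions work for the async client.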
+ assert mock_rpc.call_count == 1 + + await client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=backup.DeleteBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + +def test_delete_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = None + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. 
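+ # DeleteBackup returns google.protobuf.Empty, which surfaces as None in
+ # Python, so None is the faithful fake here.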
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_backup(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_backup_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_backup(
+ backup.DeleteBackupRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_backup(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_backup(
+ backup.DeleteBackupRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ backup.ListBackupsRequest,
+ dict,
+])
+def test_list_backups(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = backup.ListBackupsResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_backups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = backup.ListBackupsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListBackupsPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_backups_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
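+ # (Per AIP-4235, fields annotated for auto-population receive a fresh
+ # UUID4 whenever the caller leaves them unset.)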
+ client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.ListBackupsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_backups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_backups in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_backups] = mock_rpc + + request = {} + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.list_backups(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupsRequest):
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backups),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_backups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = backup.ListBackupsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListBackupsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_from_dict():
+ await test_list_backups_async(request_type=dict)
+
+def test_list_backups_field_headers():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = backup.ListBackupsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backups),
+ '__call__') as call:
+ call.return_value = backup.ListBackupsResponse()
+ client.list_backups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_backups_field_headers_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = backup.ListBackupsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backups),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse())
+ await client.list_backups(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
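+ # Over gRPC the routing params travel in call metadata as an
+ # ('x-goog-request-params', ...) tuple rather than in a URL.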
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_backups_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_backups_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backups( + backup.ListBackupsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_backups_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_backups_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backups( + backup.ListBackupsRequest(), + parent='parent_value', + ) + + +def test_list_backups_pager(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Set the response to a series of pages.
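+ # Note: the side_effect sequence below ends with RuntimeError as a sentinel, + # so a request for a page beyond the last mocked response raises instead of + # silently returning None.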
+ call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup.Backup) + for i in results) + +def test_list_backups_pages(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup.Backup) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages.
+ call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.RestoreDatabaseRequest, + dict, +]) +def test_restore_database(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.RestoreDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 fields are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.RestoreDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
+ client.restore_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.RestoreDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + +def test_restore_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. + client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc + request = {} + client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_restore_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.restore_database in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.restore_database] = mock_rpc + + request = {} + await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.RestoreDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request.
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.RestoreDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_database_async_from_dict(): + await test_restore_database_async(request_type=dict) + +def test_restore_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.RestoreDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_restore_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.RestoreDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_restore_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.restore_database( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].database_id + mock_val = 'database_id_value' + assert arg == mock_val + assert args[0].backup == 'backup_value' + + +def test_restore_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_database( + spanner_database_admin.RestoreDatabaseRequest(), + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + +@pytest.mark.asyncio +async def test_restore_database_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.restore_database( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].database_id + mock_val = 'database_id_value' + assert arg == mock_val + assert args[0].backup == 'backup_value' + +@pytest.mark.asyncio +async def test_restore_database_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.restore_database( + spanner_database_admin.RestoreDatabaseRequest(), + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabaseOperationsRequest, + dict, +]) +def test_list_database_operations(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called.
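+ # Note: the patched stub records each invocation in call.mock_calls, so a + # length of one establishes that exactly one RPC was issued.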
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabaseOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_database_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 fields are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.ListDatabaseOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. + client.list_database_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_database_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_database_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. + client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc + request = {} + client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_database_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_database_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_database_operations in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_database_operations] = mock_rpc + + request = {} + await client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_database_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_database_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseOperationsRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabaseOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseOperationsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_database_operations_async_from_dict(): + await test_list_database_operations_async(request_type=dict) + +def test_list_database_operations_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabaseOperationsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request.
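+ # Note: patching '__call__' on the type of the stub method intercepts the + # RPC before it reaches the channel, so no network traffic occurs.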
+ with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_database_operations_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabaseOperationsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) + await client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_database_operations_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_database_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_database_operations_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_database_operations( + spanner_database_admin.ListDatabaseOperationsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_database_operations_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_database_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_database_operations_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_database_operations( + spanner_database_admin.ListDatabaseOperationsRequest(), + parent='parent_value', + ) + + +def test_list_database_operations_pager(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_database_operations(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in results) + +def test_list_database_operations_pages(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Set the response to a series of pages.
+ call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_database_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_database_operations_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_database_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_database_operations_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_database_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + backup.ListBackupOperationsRequest, + dict, +]) +def test_list_backup_operations(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup.ListBackupOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_backup_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 fields are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.ListBackupOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
+ client.list_backup_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_backup_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. + client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc + request = {} + client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_backup_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_backup_operations in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_backup_operations] = mock_rpc + + request = {} + await client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_backup_operations_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupOperationsRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call.
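+ # Note: wrapping the response in FakeUnaryUnaryCall makes it awaitable, + # which the async client requires when it awaits the mocked call.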
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup.ListBackupOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupOperationsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_backup_operations_async_from_dict(): + await test_list_backup_operations_async(request_type=dict) + +def test_list_backup_operations_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupOperationsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value = backup.ListBackupOperationsResponse() + client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_backup_operations_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.ListBackupOperationsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse()) + await client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_backup_operations_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupOperationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_backup_operations_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_operations( + backup.ListBackupOperationsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_backup_operations_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_backup_operations_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_operations( + backup.ListBackupOperationsRequest(), + parent='parent_value', + ) + + +def test_list_backup_operations_pager(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Set the response to a series of pages.
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_backup_operations(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in results) + +def test_list_backup_operations_pages(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backup_operations_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages.
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_operations_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabaseRolesRequest, + dict, +]) +def test_list_database_roles(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseRolesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabaseRolesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
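+ # Note: the pager returned by the client wraps the raw response and fetches + # subsequent pages lazily as it is iterated.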
+ assert isinstance(response, pagers.ListDatabaseRolesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_database_roles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 fields are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.ListDatabaseRolesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. + client.list_database_roles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabaseRolesRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_database_roles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_database_roles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. + client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc + request = {} + client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_database_roles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_database_roles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_database_roles in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_database_roles] = mock_rpc + + request = {} + await client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + await client.list_database_roles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_database_roles_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseRolesRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabaseRolesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseRolesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_database_roles_async_from_dict(): + await test_list_database_roles_async(request_type=dict) + +def test_list_database_roles_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabaseRolesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseRolesResponse() + client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_database_roles_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabaseRolesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse()) + await client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_database_roles_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.ListDatabaseRolesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_database_roles(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_database_roles_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_database_roles(
+            spanner_database_admin.ListDatabaseRolesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_database_roles_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_database_roles(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_database_roles_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_database_roles(
+            spanner_database_admin.ListDatabaseRolesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_database_roles_pager(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__') as call:
+        # Set the response to a series of pages.
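+        # The pager walks these pages with a loop of roughly this shape
+        # (an assumed sketch, not the generated implementation):
+        #
+        #     while True:
+        #         response = rpc(request)
+        #         yield from response.database_roles
+        #         if not response.next_page_token:
+        #             break
+        #         request.page_token = response.next_page_token
+        #
+        # The trailing RuntimeError sentinel guarantees the pager stops after
+        # the final page (empty token) instead of issuing a fifth call.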
+        call.side_effect = (
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[],
+                next_page_token='def',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_database_roles(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_database_admin.DatabaseRole)
+                   for i in results)
+
+
+def test_list_database_roles_pages(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[],
+                next_page_token='def',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_database_roles(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_database_roles_async_pager():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_database_roles(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_database_admin.DatabaseRole) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_database_roles_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_database_roles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.AddSplitPointsRequest, + dict, +]) +def test_add_split_points(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.AddSplitPointsResponse( + ) + response = client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.AddSplitPointsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.AddSplitPointsResponse) + + +def test_add_split_points_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.AddSplitPointsRequest( + database='database_value', + initiator='initiator_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.add_split_points(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.AddSplitPointsRequest( + database='database_value', + initiator='initiator_value', + ) + +def test_add_split_points_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_split_points in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc + request = {} + client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called. 
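+        # The wrapped RPC (retry, timeout, and metadata applied once by
+        # wrap_method) is cached in _wrapped_methods at client construction,
+        # so the repeat call below reuses it: mock_rpc.call_count advances
+        # while wrapper_fn.call_count stays at zero.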
+        assert mock_rpc.call_count == 1
+
+        client.add_split_points(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_add_split_points_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.add_split_points in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.add_split_points] = mock_rpc
+
+        request = {}
+        await client.add_split_points(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.add_split_points(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_add_split_points_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.AddSplitPointsRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.add_split_points),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse(
+        ))
+        response = await client.add_split_points(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.AddSplitPointsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.AddSplitPointsResponse)
+
+
+@pytest.mark.asyncio
+async def test_add_split_points_async_from_dict():
+    await test_add_split_points_async(request_type=dict)
+
+def test_add_split_points_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.AddSplitPointsRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.add_split_points),
+            '__call__') as call:
+        call.return_value = spanner_database_admin.AddSplitPointsResponse()
+        client.add_split_points(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_add_split_points_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.AddSplitPointsRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.add_split_points),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse())
+        await client.add_split_points(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+def test_add_split_points_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.add_split_points),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.AddSplitPointsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.add_split_points(
+            database='database_value',
+            split_points=[spanner_database_admin.SplitPoints(table='table_value')],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+        arg = args[0].split_points
+        mock_val = [spanner_database_admin.SplitPoints(table='table_value')]
+        assert arg == mock_val
+
+
+def test_add_split_points_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.add_split_points(
+            spanner_database_admin.AddSplitPointsRequest(),
+            database='database_value',
+            split_points=[spanner_database_admin.SplitPoints(table='table_value')],
+        )
+
+@pytest.mark.asyncio
+async def test_add_split_points_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.add_split_points),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
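+        # The client copies each keyword argument onto a fresh
+        # AddSplitPointsRequest before invoking the transport, which is what
+        # the args[0] assertions below read back; mixing a request object
+        # with flattened fields raises ValueError (see the _flattened_error
+        # tests).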
+ response = await client.add_split_points( + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].split_points + mock_val = [spanner_database_admin.SplitPoints(table='table_value')] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_add_split_points_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.add_split_points( + spanner_database_admin.AddSplitPointsRequest(), + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.CreateBackupScheduleRequest, + dict, +]) +def test_create_backup_schedule(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + response = client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.CreateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup_schedule.CreateBackupScheduleRequest( + parent='parent_value', + backup_schedule_id='backup_schedule_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
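+        # AIP-4235 coverage failsafe: only the non-UUID4 string fields were
+        # set above, and the echo-back assertion that follows confirms the
+        # stub received exactly those values; any field annotated for
+        # auto-population would be filled in by the client with a UUID4.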
+ client.create_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.CreateBackupScheduleRequest( + parent='parent_value', + backup_schedule_id='backup_schedule_id_value', + ) + +def test_create_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc + request = {} + client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_backup_schedule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_backup_schedule] = mock_rpc + + request = {} + await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.CreateBackupScheduleRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
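+        # The async client awaits the call object returned by the stub, so a
+        # bare message will not do; grpc_helpers_async.FakeUnaryUnaryCall
+        # wraps the response in an awaitable whose result is the message.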
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule(
+            name='name_value',
+        ))
+        response = await client.create_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = gsad_backup_schedule.CreateBackupScheduleRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gsad_backup_schedule.BackupSchedule)
+    assert response.name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_create_backup_schedule_async_from_dict():
+    await test_create_backup_schedule_async(request_type=dict)
+
+def test_create_backup_schedule_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gsad_backup_schedule.CreateBackupScheduleRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup_schedule),
+            '__call__') as call:
+        call.return_value = gsad_backup_schedule.BackupSchedule()
+        client.create_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_backup_schedule_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gsad_backup_schedule.CreateBackupScheduleRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup_schedule),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
+        await client.create_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_backup_schedule_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gsad_backup_schedule.BackupSchedule()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_backup_schedule(
+            parent='parent_value',
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            backup_schedule_id='backup_schedule_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].backup_schedule
+        mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
+        assert arg == mock_val
+        arg = args[0].backup_schedule_id
+        mock_val = 'backup_schedule_id_value'
+        assert arg == mock_val
+
+
+def test_create_backup_schedule_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_backup_schedule(
+            gsad_backup_schedule.CreateBackupScheduleRequest(),
+            parent='parent_value',
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            backup_schedule_id='backup_schedule_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_backup_schedule_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_backup_schedule(
+            parent='parent_value',
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            backup_schedule_id='backup_schedule_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].backup_schedule
+        mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
+        assert arg == mock_val
+        arg = args[0].backup_schedule_id
+        mock_val = 'backup_schedule_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_backup_schedule_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_backup_schedule(
+            gsad_backup_schedule.CreateBackupScheduleRequest(),
+            parent='parent_value',
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            backup_schedule_id='backup_schedule_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    backup_schedule.GetBackupScheduleRequest,
+    dict,
+])
+def test_get_backup_schedule(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = backup_schedule.BackupSchedule( + name='name_value', + ) + response = client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.GetBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.GetBackupScheduleRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.GetBackupScheduleRequest( + name='name_value', + ) + +def test_get_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc + request = {} + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_backup_schedule(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_backup_schedule in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_backup_schedule] = mock_rpc
+
+        request = {}
+        await client.get_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_backup_schedule(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.GetBackupScheduleRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule(
+            name='name_value',
+        ))
+        response = await client.get_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = backup_schedule.GetBackupScheduleRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, backup_schedule.BackupSchedule)
+    assert response.name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_backup_schedule_async_from_dict():
+    await test_get_backup_schedule_async(request_type=dict)
+
+def test_get_backup_schedule_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.GetBackupScheduleRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup_schedule),
+            '__call__') as call:
+        call.return_value = backup_schedule.BackupSchedule()
+        client.get_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_backup_schedule_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.GetBackupScheduleRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup_schedule),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule())
+        await client.get_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_backup_schedule_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = backup_schedule.BackupSchedule()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_backup_schedule(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_backup_schedule_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_backup_schedule(
+            backup_schedule.GetBackupScheduleRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_backup_schedule_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_backup_schedule(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_backup_schedule_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.UpdateBackupScheduleRequest, + dict, +]) +def test_update_backup_schedule(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + response = client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup_schedule.UpdateBackupScheduleRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.UpdateBackupScheduleRequest( + ) + +def test_update_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc + request = {} + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_backup_schedule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_backup_schedule] = mock_rpc + + request = {} + await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule(
+            name='name_value',
+        ))
+        response = await client.update_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = gsad_backup_schedule.UpdateBackupScheduleRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gsad_backup_schedule.BackupSchedule)
+    assert response.name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_update_backup_schedule_async_from_dict():
+    await test_update_backup_schedule_async(request_type=dict)
+
+def test_update_backup_schedule_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gsad_backup_schedule.UpdateBackupScheduleRequest()
+
+    request.backup_schedule.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_backup_schedule),
+            '__call__') as call:
+        call.return_value = gsad_backup_schedule.BackupSchedule()
+        client.update_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'backup_schedule.name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_backup_schedule_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gsad_backup_schedule.UpdateBackupScheduleRequest()
+
+    request.backup_schedule.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_backup_schedule),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
+        await client.update_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'backup_schedule.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_backup_schedule_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gsad_backup_schedule.BackupSchedule()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_backup_schedule(
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup_schedule
+        mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_backup_schedule_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_backup_schedule(
+            gsad_backup_schedule.UpdateBackupScheduleRequest(),
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_backup_schedule_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_backup_schedule(
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup_schedule
+        mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_backup_schedule_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_backup_schedule(
+            gsad_backup_schedule.UpdateBackupScheduleRequest(),
+            backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    backup_schedule.DeleteBackupScheduleRequest,
+    dict,
+])
+def test_delete_backup_schedule(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.DeleteBackupScheduleRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.DeleteBackupScheduleRequest( + name='name_value', + ) + +def test_delete_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc + request = {} + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_backup_schedule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_backup_schedule] = mock_rpc + + request = {} + await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.DeleteBackupScheduleRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup_schedule.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async_from_dict(): + await test_delete_backup_schedule_async(request_type=dict) + +def test_delete_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.DeleteBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value = None + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.DeleteBackupScheduleRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup_schedule),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_backup_schedule_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_backup_schedule(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_backup_schedule_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_backup_schedule(
+            backup_schedule.DeleteBackupScheduleRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_backup_schedule(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
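+    # Flattened keyword fields are only a convenience layer over the request
+    # message; when both are supplied the client cannot tell which value
+    # should win, so it raises ValueError before any RPC is attempted.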
+    with pytest.raises(ValueError):
+        await client.delete_backup_schedule(
+            backup_schedule.DeleteBackupScheduleRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    backup_schedule.ListBackupSchedulesRequest,
+    dict,
+])
+def test_list_backup_schedules(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = backup_schedule.ListBackupSchedulesResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = backup_schedule.ListBackupSchedulesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBackupSchedulesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_backup_schedules_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = backup_schedule.ListBackupSchedulesRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.list_backup_schedules(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == backup_schedule.ListBackupSchedulesRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+        )
+
+def test_list_backup_schedules_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_backup_schedules in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc
+        request = {}
+        client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_backup_schedules(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_backup_schedules in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_backup_schedules] = mock_rpc
+
+        request = {}
+        await client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_backup_schedules(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.ListBackupSchedulesRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = backup_schedule.ListBackupSchedulesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBackupSchedulesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_async_from_dict():
+    await test_list_backup_schedules_async(request_type=dict)
+
+def test_list_backup_schedules_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.ListBackupSchedulesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        call.return_value = backup_schedule.ListBackupSchedulesResponse()
+        client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.ListBackupSchedulesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
+        await client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_backup_schedules_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = backup_schedule.ListBackupSchedulesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_backup_schedules(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_backup_schedules_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_backup_schedules(
+            backup_schedule.ListBackupSchedulesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
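+        # (FakeUnaryUnaryCall above wraps the canned response in an awaitable,
+        # standing in for the call object a real grpc.aio stub would return.)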
+        response = await client.list_backup_schedules(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_backup_schedules(
+            backup_schedule.ListBackupSchedulesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_backup_schedules_pager(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                ],
+                next_page_token='abc',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[],
+                next_page_token='def',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                ],
+                next_page_token='ghi',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_backup_schedules(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, backup_schedule.BackupSchedule)
+                   for i in results)
+
+
+def test_list_backup_schedules_pages(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Set the response to a series of pages.
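+        # The trailing RuntimeError below is a sentinel: if the pager requests
+        # more pages than the test supplies, the mock raises instead of
+        # silently recycling responses.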
+ call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token='abc', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token='def', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token='ghi', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_schedules(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token='abc', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token='def', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token='ghi', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_schedules(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup_schedule.BackupSchedule) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
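+        # (With new_callable=mock.AsyncMock the patched __call__ is itself
+        # awaitable, so each page can be returned directly instead of being
+        # wrapped in FakeUnaryUnaryCall.)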
+        call.side_effect = (
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                ],
+                next_page_token='abc',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[],
+                next_page_token='def',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                ],
+                next_page_token='ghi',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+        async for page_ in ( # pragma: no branch
+            await client.list_backup_schedules(request={})
+        ).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+    spanner_database_admin.InternalUpdateGraphOperationRequest,
+    dict,
+])
+def test_internal_update_graph_operation(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse()
+        response = client.internal_update_graph_operation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.InternalUpdateGraphOperationRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse)
+
+
+def test_internal_update_graph_operation_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP-4235.
+    request = spanner_database_admin.InternalUpdateGraphOperationRequest(
+        database='database_value',
+        operation_id='operation_id_value',
+        vm_identity_token='vm_identity_token_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.internal_update_graph_operation(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.InternalUpdateGraphOperationRequest( + database='database_value', + operation_id='operation_id_value', + vm_identity_token='vm_identity_token_value', + ) + +def test_internal_update_graph_operation_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.internal_update_graph_operation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.internal_update_graph_operation] = mock_rpc + request = {} + client.internal_update_graph_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.internal_update_graph_operation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_internal_update_graph_operation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.internal_update_graph_operation in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.internal_update_graph_operation] = mock_rpc + + request = {} + await client.internal_update_graph_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.internal_update_graph_operation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_internal_update_graph_operation_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.InternalUpdateGraphOperationRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
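+    # Patching __call__ on the multicallable's type intercepts the stub
+    # invocation itself, so the full client-to-transport path is exercised
+    # without ever opening a channel.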
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse())
+        response = await client.internal_update_graph_operation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.InternalUpdateGraphOperationRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse)
+
+
+@pytest.mark.asyncio
+async def test_internal_update_graph_operation_async_from_dict():
+    await test_internal_update_graph_operation_async(request_type=dict)
+
+
+def test_internal_update_graph_operation_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.internal_update_graph_operation(
+            database='database_value',
+            operation_id='operation_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+        arg = args[0].operation_id
+        mock_val = 'operation_id_value'
+        assert arg == mock_val
+
+
+def test_internal_update_graph_operation_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.internal_update_graph_operation(
+            spanner_database_admin.InternalUpdateGraphOperationRequest(),
+            database='database_value',
+            operation_id='operation_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_internal_update_graph_operation_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.internal_update_graph_operation(
+            database='database_value',
+            operation_id='operation_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].operation_id + mock_val = 'operation_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_internal_update_graph_operation_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.internal_update_graph_operation( + spanner_database_admin.InternalUpdateGraphOperationRequest(), + database='database_value', + operation_id='operation_id_value', + ) + + +def test_list_databases_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_databases in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + + request = {} + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_databases_rest_required_fields(request_type=spanner_database_admin.ListDatabasesRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. 
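+    # Patching requests.Session.request keeps the test hermetic: the transport
+    # still builds the full HTTP request, but nothing leaves the process and
+    # the canned Response constructed below is returned instead.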
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_databases(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_databases_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_databases._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_databases_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_database_admin.ListDatabasesResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_databases(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1])
+
+
+def test_list_databases_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.list_databases( + spanner_database_admin.ListDatabasesRequest(), + parent='parent_value', + ) + + +def test_list_databases_rest_pager(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(spanner_database_admin.ListDatabasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + pager = client.list_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, spanner_database_admin.Database) + for i in results) + + pages = list(client.list_databases(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_create_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. 
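+        # (The empty dict above is valid input: the client coerces it into a
+        # CreateDatabaseRequest before invoking the mocked wrapped RPC.)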
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_database_rest_required_fields(request_type=spanner_database_admin.CreateDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["create_statement"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["createStatement"] = 'create_statement_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "createStatement" in jsonified_request + assert jsonified_request["createStatement"] == 'create_statement_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.create_database(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_database_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_database._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "createStatement", )))
+
+
+def test_create_database_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            create_statement='create_statement_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.create_database(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1])
+
+
+def test_create_database_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_database( + spanner_database_admin.CreateDatabaseRequest(), + parent='parent_value', + create_statement='create_statement_value', + ) + + +def test_get_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_database] = mock_rpc + + request = {} + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_database_rest_required_fields(request_type=spanner_database_admin.GetDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_database_admin.Database.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_database(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_database_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_database._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_database_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_database_admin.Database()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_database_admin.Database.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.get_database(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*}" % client.transport._host, args[1])
+
+
+def test_get_database_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.get_database( + spanner_database_admin.GetDatabaseRequest(), + name='name_value', + ) + + +def test_update_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + + request = {} + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_database_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.update_database(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_database_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_database._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask", )) & set(("database", "updateMask", )))
+
+
+def test_update_database_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            database=spanner_database_admin.Database(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.update_database(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{database.name=projects/*/instances/*/databases/*}" % client.transport._host, args[1])
+
+
+def test_update_database_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.update_database( + spanner_database_admin.UpdateDatabaseRequest(), + database=spanner_database_admin.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_database_ddl_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database_ddl in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc + + request = {} + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_database_ddl_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseDdlRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request_init["statements"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + jsonified_request["statements"] = 'statements_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + assert "statements" in jsonified_request + assert jsonified_request["statements"] == 'statements_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_database_ddl(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_database_ddl_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_database_ddl._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", "statements", ))) + + +def test_update_database_ddl_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + statements=['statements_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_database_ddl(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1]) + + +def test_update_database_ddl_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_database_ddl( + spanner_database_admin.UpdateDatabaseDdlRequest(), + database='database_value', + statements=['statements_value'], + ) + + +def test_drop_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.drop_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc + + request = {} + client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.drop_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_drop_database_rest_required_fields(request_type=spanner_database_admin.DropDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
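+            # drop_database maps to HTTP DELETE with no request body, so the
+            # stubbed transcode result below carries no 'body' entry.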
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.drop_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_drop_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.drop_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", ))) + + +def test_drop_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.drop_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}" % client.transport._host, args[1]) + + +def test_drop_database_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.drop_database( + spanner_database_admin.DropDatabaseRequest(), + database='database_value', + ) + + +def test_get_database_ddl_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database_ddl in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
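+        # Swapping the cached entry out for a mock lets the test count
+        # invocations without re-triggering wrap_method.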
+ client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc + + request = {} + client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_database_ddl_rest_required_fields(request_type=spanner_database_admin.GetDatabaseDdlRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.GetDatabaseDdlResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
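+            # Every call through the REST transport is expected to carry the
+            # $alt system parameter ('json;enum-encoding=int'), which the
+            # assertion at the end of this test checks.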
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_database_ddl(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_database_ddl_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_database_ddl._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", ))) + + +def test_get_database_ddl_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.GetDatabaseDdlResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_database_ddl(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1]) + + +def test_get_database_ddl_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_database_ddl( + spanner_database_admin.GetDatabaseDdlRequest(), + database='database_value', + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
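+            # iam_policy_pb2 requests are plain protobuf messages rather
+            # than proto-plus types, so no request_type.pb() conversion is
+            # needed here.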
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) + + +def test_set_iam_policy_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", ))) + + +def test_get_iam_policy_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + jsonified_request["permissions"] = 'permissions_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == 'permissions_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) + + +def test_test_iam_permissions_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + permissions=['permissions_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +def test_create_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc + + request = {} + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_rest_required_fields(request_type=gsad_backup.CreateBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "backupId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["backupId"] = 'backup_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("backup_id", "encryption_config", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == 'backup_id_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
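+    # create_backup sends backupId as a required query parameter, so the
+    # expected params below include it even at its default empty value.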
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_backup(request) + + expected_params = [ + ( + "backupId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("backupId", "encryptionConfig", )) & set(("parent", "backupId", "backup", ))) + + +def test_create_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1]) + + +def test_create_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup( + gsad_backup.CreateBackupRequest(), + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + +def test_copy_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc + + request = {} + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_id"] = "" + request_init["source_backup"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["backupId"] = 'backup_id_value' + jsonified_request["sourceBackup"] = 'source_backup_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == 'backup_id_value' + assert "sourceBackup" in jsonified_request + assert jsonified_request["sourceBackup"] == 'source_backup_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
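+    # operations_pb2.Operation is a plain protobuf message, so the fake
+    # response is serialized with MessageToJson directly, with no
+    # proto-plus .pb() conversion first.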
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.copy_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_copy_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.copy_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "backupId", "sourceBackup", "expireTime", ))) + + +def test_copy_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.copy_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups:copy" % client.transport._host, args[1]) + + +def test_copy_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.copy_backup( + backup.CopyBackupRequest(), + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
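+            # backup.Backup is a proto-plus type, so the fake response is
+            # converted via backup.Backup.pb() below before being serialized
+            # to JSON.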
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) + + +def test_get_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + backup.GetBackupRequest(), + name='name_value', + ) + + +def test_update_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_rest_required_fields(request_type=gsad_backup.UpdateBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsad_backup.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
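+            # update_backup is an HTTP PATCH carrying the Backup message as
+            # the request body, so the stub below adds a 'body' entry.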
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("backup", "updateMask", ))) + + +def test_update_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{backup.name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) + + +def test_update_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup( + gsad_backup.UpdateBackupRequest(), + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
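+ # Editor's note: unlike the update test above, the transcode stub built
+ # below carries no 'body' key and the faked response body is the empty
+ # string, matching an HTTP DELETE whose response is google.protobuf.Empty.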
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) + + +def test_delete_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backup.DeleteBackupRequest(), + name='name_value', + ) + + +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
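+ # Editor's note: _wrapped_methods maps each transport method to the
+ # gapic_v1.method.wrap_method() result carrying its retry/timeout
+ # defaults. Swapping the cached entry for a bare mock on the next line
+ # means call_count counts direct invocations, while wrapper_fn staying
+ # quiet proves the cache is not rebuilt per call.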
+ client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = backup.ListBackupsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_backups(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_backups_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list_backups._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_backups_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = backup.ListBackupsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = backup.ListBackupsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_backups(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1])
+
+
+def test_list_backups_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_backups(
+ backup.ListBackupsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_backups_rest_pager(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ backup.ListBackupsResponse(
+ backups=[
+ backup.Backup(),
+ backup.Backup(),
+ backup.Backup(),
+ ],
+ next_page_token='abc',
+ ),
+ backup.ListBackupsResponse(
+ backups=[],
+ next_page_token='def',
+ ),
+ backup.ListBackupsResponse(
+ backups=[
+ backup.Backup(),
+ ],
+ next_page_token='ghi',
+ ),
+ backup.ListBackupsResponse(
+ backups=[
+ backup.Backup(),
+ backup.Backup(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(backup.ListBackupsResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for _ in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+ pager = client.list_backups(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, backup.Backup)
+ for i in results)
+
+ pages = list(client.list_backups(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_restore_database_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.restore_database in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
+ client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc
+
+ request = {}
+ client.restore_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
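+ # Editor's note: outside this mocked setup restore_database returns a
+ # long-running operation. A minimal real call (hypothetical values) would
+ # look like:
+ # operation = client.restore_database(parent=parent, database_id=db_id, backup=backup_name)
+ # database = operation.result() # blocks until the restore LRO completes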
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_database_rest_required_fields(request_type=spanner_database_admin.RestoreDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["databaseId"] = 'database_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == 'database_id_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
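+ # Editor's note: the expected_params assertion further down lists only the
+ # system parameter ('$alt', 'json;enum-encoding=int'), which the REST
+ # transport appends to every call so responses arrive as JSON with enums
+ # encoded as integers; the transcode stub keeps method params out of the way.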
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.restore_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_restore_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.restore_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "databaseId", ))) + + +def test_restore_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + database_id='database_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.restore_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases:restore" % client.transport._host, args[1]) + + +def test_restore_database_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.restore_database( + spanner_database_admin.RestoreDatabaseRequest(), + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + +def test_list_database_operations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_database_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc + + request = {} + client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_database_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_database_operations_rest_required_fields(request_type=spanner_database_admin.ListDatabaseOperationsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
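+ # Editor's note (best-effort reading): _get_unset_required_fields returns
+ # the required fields whose proto3 default values were dropped from the
+ # JSON above, so the transport can still send them explicitly; the
+ # companion *_unset_required_fields test asserts it as the intersection of
+ # default-valued query params and the method's required fields.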
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_database_operations(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_database_operations_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list_database_operations._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_database_operations_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = spanner_database_admin.ListDatabaseOperationsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_database_operations(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databaseOperations" % client.transport._host, args[1])
+
+
+def test_list_database_operations_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_database_operations(
+ spanner_database_admin.ListDatabaseOperationsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_database_operations_rest_pager(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ spanner_database_admin.ListDatabaseOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner_database_admin.ListDatabaseOperationsResponse(
+ operations=[],
+ next_page_token='def',
+ ),
+ spanner_database_admin.ListDatabaseOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner_database_admin.ListDatabaseOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(spanner_database_admin.ListDatabaseOperationsResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for _ in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+ pager = client.list_database_operations(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation)
+ for i in results)
+
+ pages = list(client.list_database_operations(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_list_backup_operations_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_backup_operations in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
+ client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc
+
+ request = {}
+ client.list_backup_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_backup_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_operations_rest_required_fields(request_type=backup.ListBackupOperationsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
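+ # Editor's note: the request -> pb -> MessageToJson -> dict round-trip
+ # above simulates a partially populated request, since MessageToJson omits
+ # fields still at their default values.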
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = backup.ListBackupOperationsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_backup_operations(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_backup_operations_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list_backup_operations._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_backup_operations_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = backup.ListBackupOperationsResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = backup.ListBackupOperationsResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_backup_operations(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backupOperations" % client.transport._host, args[1])
+
+
+def test_list_backup_operations_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_backup_operations(
+ backup.ListBackupOperationsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_backup_operations_rest_pager(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ backup.ListBackupOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token='abc',
+ ),
+ backup.ListBackupOperationsResponse(
+ operations=[],
+ next_page_token='def',
+ ),
+ backup.ListBackupOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ ],
+ next_page_token='ghi',
+ ),
+ backup.ListBackupOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(backup.ListBackupOperationsResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for _ in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+ pager = client.list_backup_operations(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation)
+ for i in results)
+
+ pages = list(client.list_backup_operations(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_list_database_roles_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_database_roles in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
+ client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc
+
+ request = {}
+ client.list_database_roles(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_database_roles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_database_roles_rest_required_fields(request_type=spanner_database_admin.ListDatabaseRolesRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseRolesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
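+ # Editor's note: the fake replies below build a bare requests.Response and
+ # set its private _content along with status_code and headers; that is all
+ # the REST transport reads from the session's return value.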
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_database_roles(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_database_roles_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list_database_roles._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_database_roles_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = spanner_database_admin.ListDatabaseRolesResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_database_roles(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles" % client.transport._host, args[1])
+
+
+def test_list_database_roles_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_database_roles(
+ spanner_database_admin.ListDatabaseRolesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_database_roles_rest_pager(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ spanner_database_admin.ListDatabaseRolesResponse(
+ database_roles=[
+ spanner_database_admin.DatabaseRole(),
+ spanner_database_admin.DatabaseRole(),
+ spanner_database_admin.DatabaseRole(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner_database_admin.ListDatabaseRolesResponse(
+ database_roles=[],
+ next_page_token='def',
+ ),
+ spanner_database_admin.ListDatabaseRolesResponse(
+ database_roles=[
+ spanner_database_admin.DatabaseRole(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner_database_admin.ListDatabaseRolesResponse(
+ database_roles=[
+ spanner_database_admin.DatabaseRole(),
+ spanner_database_admin.DatabaseRole(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(spanner_database_admin.ListDatabaseRolesResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for _ in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ pager = client.list_database_roles(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, spanner_database_admin.DatabaseRole)
+ for i in results)
+
+ pages = list(client.list_database_roles(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_add_split_points_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.add_split_points in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
+ client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc
+
+ request = {}
+ client.add_split_points(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.add_split_points(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_add_split_points_rest_required_fields(request_type=spanner_database_admin.AddSplitPointsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.AddSplitPointsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.add_split_points(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_split_points_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_split_points._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", "splitPoints", ))) + + +def test_add_split_points_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
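+ # Editor's note: patching type(client.transport._session).request on the
+ # next line replaces the method on the Session class rather than on the
+ # instance, so the bound call made inside the transport is intercepted no
+ # matter how the session object was constructed.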
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.AddSplitPointsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.add_split_points(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints" % client.transport._host, args[1]) + + +def test_add_split_points_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_split_points( + spanner_database_admin.AddSplitPointsRequest(), + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + + +def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc + + request = {} + client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.CreateBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_schedule_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "backupScheduleId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupScheduleId" in jsonified_request + assert jsonified_request["backupScheduleId"] == request_init["backup_schedule_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["backupScheduleId"] = 'backup_schedule_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("backup_schedule_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "backupScheduleId" in jsonified_request + assert jsonified_request["backupScheduleId"] == 'backup_schedule_id_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
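+ # Editor's note: backup_schedule_id is a required field that travels as a
+ # query parameter, so after the default-value pass above it also appears
+ # in expected_params below as ("backupScheduleId", ""), making this the
+ # one method in this block that asserts two query parameters.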
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_backup_schedule(request) + + expected_params = [ + ( + "backupScheduleId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_backup_schedule_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("backupScheduleId", )) & set(("parent", "backupScheduleId", "backupSchedule", ))) + + +def test_create_backup_schedule_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1]) + + +def test_create_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_schedule( + gsad_backup_schedule.CreateBackupScheduleRequest(), + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + + +def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc + + request = {} + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_schedule_rest_required_fields(request_type=backup_schedule.GetBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup_schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
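+ # GetBackupSchedule maps to an HTTP GET, so unlike the create test above
+ # the stand-in transcode result carries no 'body' entry.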
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backup_schedule(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_backup_schedule_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_backup_schedule_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup_schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) + + +def test_get_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name='name_value', + ) + + +def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc + + request = {} + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
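+ # UpdateBackupSchedule maps to an HTTP PATCH that carries the schedule in
+ # the request body, while updateMask travels as a query parameter; that is
+ # why update_mask is the only field allowed to remain in unset_fields above.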
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_backup_schedule(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_backup_schedule_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("backupSchedule", "updateMask", ))) + + +def test_update_backup_schedule_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} + + # get truthy value for each flattened field + mock_args = dict( + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) + + +def test_update_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup_schedule( + gsad_backup_schedule.UpdateBackupScheduleRequest(), + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc + + request = {} + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_schedule_rest_required_fields(request_type=backup_schedule.DeleteBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
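+ # DeleteBackupSchedule maps to an HTTP DELETE that carries no request
+ # body and returns no payload, so the faked response content below is an
+ # empty string.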
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_backup_schedule(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_backup_schedule_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_backup_schedule_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) + + +def test_delete_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_schedule( + backup_schedule.DeleteBackupScheduleRequest(), + name='name_value', + ) + + +def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_schedules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
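+ # _wrapped_methods is keyed by the bound transport method, so swapping the
+ # cached wrapped callable for a mock lets the test count invocations and
+ # prove that no additional wrapper is built on later calls.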
+ client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc + + request = {} + client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_schedules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_schedules_rest_required_fields(request_type=backup_schedule.ListBackupSchedulesRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup_schedule.ListBackupSchedulesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_backup_schedules(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+ def test_list_backup_schedules_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+ unset_fields = transport.list_backup_schedules._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+ def test_list_backup_schedules_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = backup_schedule.ListBackupSchedulesResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_backup_schedules(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1])
+
+
+ def test_list_backup_schedules_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_backup_schedules(
+ backup_schedule.ListBackupSchedulesRequest(),
+ parent='parent_value',
+ )
+
+
+ def test_list_backup_schedules_rest_pager(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
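+ # The REST pager issues one HTTP request per page; each page's
+ # next_page_token becomes the page_token of the following request, so
+ # req.side_effect below hands out the fake pages in order.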
+ # Set the response as a series of pages
+ response = (
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[
+ backup_schedule.BackupSchedule(),
+ backup_schedule.BackupSchedule(),
+ backup_schedule.BackupSchedule(),
+ ],
+ next_page_token='abc',
+ ),
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[],
+ next_page_token='def',
+ ),
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[
+ backup_schedule.BackupSchedule(),
+ ],
+ next_page_token='ghi',
+ ),
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[
+ backup_schedule.BackupSchedule(),
+ backup_schedule.BackupSchedule(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(backup_schedule.ListBackupSchedulesResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ pager = client.list_backup_schedules(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, backup_schedule.BackupSchedule)
+ for i in results)
+
+ pages = list(client.list_backup_schedules(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+ def test_internal_update_graph_operation_rest_no_http_options():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = spanner_database_admin.InternalUpdateGraphOperationRequest()
+ with pytest.raises(RuntimeError):
+ client.internal_update_graph_operation(request)
+
+
+ def test_internal_update_graph_operation_rest_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest'
+ )
+ # Since a `google.api.http` annotation is required for using a rest transport
+ # method, this should error.
+ with pytest.raises(NotImplementedError) as not_implemented_error:
+ client.internal_update_graph_operation({})
+ assert (
+ "Method InternalUpdateGraphOperation is not available over REST transport"
+ in str(not_implemented_error.value)
+ )
+
+
+ def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.DatabaseAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.DatabaseAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = DatabaseAdminClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a transport instance.
+ transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatabaseAdminClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatabaseAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminGrpcAsyncIOTransport, + transports.DatabaseAdminRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DatabaseAdminClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_databases_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabasesResponse() + client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = spanner_database_admin.Database() + client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_ddl_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_drop_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + call.return_value = None + client.drop_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.DropDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_database_ddl_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + call.return_value = spanner_database_admin.GetDatabaseDdlResponse() + client.get_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_backup(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.CreateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.copy_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.CopyBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = backup.Backup() + client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = gsad_backup.Backup() + client.update_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = None + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backups_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value = backup.ListBackupsResponse() + client.list_backups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_operations_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + client.list_database_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_operations_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value = backup.ListBackupOperationsResponse() + client.list_backup_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_roles_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseRolesResponse() + client.list_database_roles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseRolesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_add_split_points_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + call.return_value = spanner_database_admin.AddSplitPointsResponse() + client.add_split_points(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.AddSplitPointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value = backup_schedule.BackupSchedule() + client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value = None + client.delete_backup_schedule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.DeleteBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_schedules_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + call.return_value = backup_schedule.ListBackupSchedulesResponse() + client.list_backup_schedules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.ListBackupSchedulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_internal_update_graph_operation_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.internal_update_graph_operation), + '__call__') as call: + call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse() + client.internal_update_graph_operation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DatabaseAdminAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_databases_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + )) + await client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database( + name='name_value', + state=spanner_database_admin.Database.State.CREATING, + version_retention_period='version_retention_period_value', + default_leader='default_leader_value', + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + enable_drop_protection=True, + reconciling=True, + )) + await client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_database_ddl_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_database_ddl(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_drop_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.drop_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.DropDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_database_ddl_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse( + statements=['statements_value'], + proto_descriptors=b'proto_descriptors_blob', + )) + await client.get_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
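+ # FakeUnaryUnaryCall wraps the message in an awaitable stand-in for a real
+ # gRPC unary-unary call, so awaiting the mocked RPC yields the Policy below.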
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + await client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.CreateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_copy_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.copy_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.CopyBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
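+    # Each RPC is exposed on the transport as a callable; patching __call__ on
+    # its type intercepts the invocation without touching a real channel.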
+ with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + )) + await client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=gsad_backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + )) + await client.update_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backups_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse( + next_page_token='next_page_token_value', + )) + await client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_restore_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_database_operations_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + )) + await client.list_database_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_operations_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + )) + await client.list_backup_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
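+# With request=None the client constructs a default (empty) request message of
+# the expected type, which is what the assertion at the end of each of these
+# tests verifies.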
+@pytest.mark.asyncio +async def test_list_database_roles_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse( + next_page_token='next_page_token_value', + )) + await client.list_database_roles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseRolesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_add_split_points_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse( + )) + await client.add_split_points(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.AddSplitPointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( + name='name_value', + )) + await client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule( + name='name_value', + )) + await client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( + name='name_value', + )) + await client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.DeleteBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_schedules_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse( + next_page_token='next_page_token_value', + )) + await client.list_backup_schedules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.ListBackupSchedulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_internal_update_graph_operation_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.internal_update_graph_operation), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse( + )) + await client.internal_update_graph_operation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DatabaseAdminClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_databases_rest_bad_request(request_type=spanner_database_admin.ListDatabasesRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_databases(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabasesRequest, + dict, +]) +def test_list_databases_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_databases(request) + + # Establish that the response is the type that we expect. 
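+    # The method returns a pager rather than the raw response; attribute access
+    # such as next_page_token is proxied to the wrapped ListDatabasesResponse,
+    # and iterating the pager would issue requests for any subsequent pages.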
+ assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_databases_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_databases") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_database_admin.ListDatabasesRequest.pb(spanner_database_admin.ListDatabasesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_database_admin.ListDatabasesResponse.to_json(spanner_database_admin.ListDatabasesResponse()) + req.return_value.content = return_value + + request = spanner_database_admin.ListDatabasesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.ListDatabasesResponse() + post_with_metadata.return_value = spanner_database_admin.ListDatabasesResponse(), metadata + + client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_database_rest_bad_request(request_type=spanner_database_admin.CreateDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.CreateDatabaseRequest, + dict, +]) +def test_create_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
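+    # The mocked Session returns a Response-like object; the REST transport
+    # reads the JSON payload from its .content attribute and parses it back
+    # into the expected protobuf message.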
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_database(request)
+
+    # Establish that the response is the type that we expect. create_database
+    # is a long-running operation, so the REST client wraps the returned
+    # operations_pb2.Operation in an api_core operation.Operation future.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_database_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+        )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database") as post, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_database") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_database_admin.CreateDatabaseRequest.pb(spanner_database_admin.CreateDatabaseRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_database_admin.CreateDatabaseRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_database_rest_bad_request(request_type=spanner_database_admin.GetDatabaseRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
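+    # api_core maps an HTTP 400 status to core_exceptions.BadRequest, which is
+    # the exception pytest.raises expects below.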
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.GetDatabaseRequest, + dict, +]) +def test_get_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.Database( + name='name_value', + state=spanner_database_admin.Database.State.CREATING, + version_retention_period='version_retention_period_value', + default_leader='default_leader_value', + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + enable_drop_protection=True, + reconciling=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_database(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_database_admin.Database) + assert response.name == 'name_value' + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == 'version_retention_period_value' + assert response.default_leader == 'default_leader_value' + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.enable_drop_protection is True + assert response.reconciling is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_database_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_database_admin.GetDatabaseRequest.pb(spanner_database_admin.GetDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_database_admin.Database.to_json(spanner_database_admin.Database()) + req.return_value.content = return_value + + request = spanner_database_admin.GetDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.Database() + post_with_metadata.return_value = spanner_database_admin.Database(), metadata + + client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_database_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.UpdateDatabaseRequest, + dict, +]) +def test_update_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} + request_init["database"] = {'name': 'projects/sample1/instances/sample2/databases/sample3', 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'restore_info': {'source_type': 1, 'backup_info': {'backup': 'backup_value', 'version_time': {}, 'create_time': {}, 'source_database': 'source_database_value'}}, 'encryption_config': {'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'encryption_info': [{'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}], 'version_retention_period': 'version_retention_period_value', 'earliest_version_time': {}, 'default_leader': 'default_leader_value', 'database_dialect': 1, 'enable_drop_protection': True, 'reconciling': True} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = spanner_database_admin.UpdateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_database(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_database_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+        )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database") as post, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_database_admin.UpdateDatabaseRequest.pb(spanner_database_admin.UpdateDatabaseRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_database_admin.UpdateDatabaseRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_database_ddl_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseDdlRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_database_ddl(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_database_admin.UpdateDatabaseDdlRequest,
+    dict,
+])
+def test_update_database_ddl_rest_call_success(request_type):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_database_ddl(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_database_ddl_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+        )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl") as post, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database_ddl") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_database_admin.UpdateDatabaseDdlRequest.pb(spanner_database_admin.UpdateDatabaseDdlRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_database_admin.UpdateDatabaseDdlRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_drop_database_rest_bad_request(request_type=spanner_database_admin.DropDatabaseRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.drop_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.DropDatabaseRequest, + dict, +]) +def test_drop_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.drop_database(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_drop_database_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_drop_database") as pre: + pre.assert_not_called() + pb_message = spanner_database_admin.DropDatabaseRequest.pb(spanner_database_admin.DropDatabaseRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner_database_admin.DropDatabaseRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.drop_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_get_database_ddl_rest_bad_request(request_type=spanner_database_admin.GetDatabaseDdlRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_database_ddl(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.GetDatabaseDdlRequest, + dict, +]) +def test_get_database_ddl_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.GetDatabaseDdlResponse( + statements=['statements_value'], + proto_descriptors=b'proto_descriptors_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_database_ddl(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) + assert response.statements == ['statements_value'] + assert response.proto_descriptors == b'proto_descriptors_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_database_ddl_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database_ddl") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_database_admin.GetDatabaseDdlRequest.pb(spanner_database_admin.GetDatabaseDdlRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_database_admin.GetDatabaseDdlResponse.to_json(spanner_database_admin.GetDatabaseDdlResponse()) + req.return_value.content = return_value + + request = spanner_database_admin.GetDatabaseDdlRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.GetDatabaseDdlResponse() + post_with_metadata.return_value = spanner_database_admin.GetDatabaseDdlResponse(), metadata + + client.get_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.SetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value.content = return_value + + request = iam_policy_pb2.SetIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value.content = return_value + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) + req.return_value.content = return_value + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_backup_rest_bad_request(request_type=gsad_backup.CreateBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backup(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup.CreateBackupRequest, + dict, +]) +def test_create_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'name_value', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.CreateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup(request) + + # Establish that the response is the type that we expect. 
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_backup_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+    )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup") as post, \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = gsad_backup.CreateBackupRequest.pb(gsad_backup.CreateBackupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = gsad_backup.CreateBackupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_copy_backup_rest_bad_request(request_type=backup.CopyBackupRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.copy_backup(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    backup.CopyBackupRequest,
+    dict,
+])
+def test_copy_backup_rest_call_success(request_type):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
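+        # CopyBackup is also long-running, so the mocked HTTP body below is a
+        # bare Operation message rather than a Backup resource.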
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.copy_backup(request)
+
+    # Establish that the response is the type that we expect.
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_copy_backup_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+    )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup") as post, \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_copy_backup") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = backup.CopyBackupRequest.pb(backup.CopyBackupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = backup.CopyBackupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.copy_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_backup_rest_bad_request(request_type=backup.GetBackupRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup(request) + + +@pytest.mark.parametrize("request_type", [ + backup.GetBackupRequest, + dict, +]) +def test_get_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
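+    # The mocked body was serialized from the proto-plus type via
+    # backup.Backup.pb(...), so every scalar set on `return_value` should
+    # survive the JSON round trip.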
+ assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.Backup.to_json(backup.Backup()) + req.return_value.content = return_value + + request = backup.GetBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.Backup() + post_with_metadata.return_value = backup.Backup(), metadata + + client.get_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_backup_rest_bad_request(request_type=gsad_backup.UpdateBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backup(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup.UpdateBackupRequest, + dict, +]) +def test_update_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} + request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'projects/sample1/instances/sample2/backups/sample3', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.UpdateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=gsad_backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_backup(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gsad_backup.UpdateBackupRequest.pb(gsad_backup.UpdateBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gsad_backup.Backup.to_json(gsad_backup.Backup()) + req.return_value.content = return_value + + request = gsad_backup.UpdateBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gsad_backup.Backup() + post_with_metadata.return_value = gsad_backup.Backup(), metadata + + client.update_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_backup_rest_bad_request(request_type=backup.DeleteBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_backup(request) + + +@pytest.mark.parametrize("request_type", [ + backup.DeleteBackupRequest, + dict, +]) +def test_delete_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup") as pre: + pre.assert_not_called() + pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = backup.DeleteBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_list_backups_rest_bad_request(request_type=backup.ListBackupsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backups(request) + + +@pytest.mark.parametrize("request_type", [ + backup.ListBackupsRequest, + dict, +]) +def test_list_backups_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backups") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.ListBackupsResponse.to_json(backup.ListBackupsResponse()) + req.return_value.content = return_value + + request = backup.ListBackupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.ListBackupsResponse() + post_with_metadata.return_value = backup.ListBackupsResponse(), metadata + + client.list_backups(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_restore_database_rest_bad_request(request_type=spanner_database_admin.RestoreDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.restore_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.RestoreDatabaseRequest, + dict, +]) +def test_restore_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.restore_database(request) + + # Establish that the response is the type that we expect. 
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_restore_database_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+    )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database") as post, \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_restore_database") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_database_admin.RestoreDatabaseRequest.pb(spanner_database_admin.RestoreDatabaseRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_database_admin.RestoreDatabaseRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.restore_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_database_operations_rest_bad_request(request_type=spanner_database_admin.ListDatabaseOperationsRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_database_operations(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_database_admin.ListDatabaseOperationsRequest,
+    dict,
+])
+def test_list_database_operations_rest_call_success(request_type):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_database_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_database_operations_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_operations") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_database_admin.ListDatabaseOperationsRequest.pb(spanner_database_admin.ListDatabaseOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_database_admin.ListDatabaseOperationsResponse.to_json(spanner_database_admin.ListDatabaseOperationsResponse()) + req.return_value.content = return_value + + request = spanner_database_admin.ListDatabaseOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + post_with_metadata.return_value = spanner_database_admin.ListDatabaseOperationsResponse(), metadata + + client.list_database_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_backup_operations_rest_bad_request(request_type=backup.ListBackupOperationsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backup_operations(request) + + +@pytest.mark.parametrize("request_type", [ + backup.ListBackupOperationsRequest, + dict, +]) +def test_list_backup_operations_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.ListBackupOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_operations(request) + + # Establish that the response is the type that we expect. 
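+    # List methods do not return the raw response: they return a Pager that
+    # wraps it, lazily fetches follow-up pages, and still surfaces response
+    # fields such as next_page_token.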
+ assert isinstance(response, pagers.ListBackupOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_operations_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_operations") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup.ListBackupOperationsRequest.pb(backup.ListBackupOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.ListBackupOperationsResponse.to_json(backup.ListBackupOperationsResponse()) + req.return_value.content = return_value + + request = backup.ListBackupOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.ListBackupOperationsResponse() + post_with_metadata.return_value = backup.ListBackupOperationsResponse(), metadata + + client.list_backup_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_database_roles_rest_bad_request(request_type=spanner_database_admin.ListDatabaseRolesRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_database_roles(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabaseRolesRequest, + dict, +]) +def test_list_database_roles_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseRolesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_database_roles(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDatabaseRolesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_database_roles_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_roles") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_database_admin.ListDatabaseRolesRequest.pb(spanner_database_admin.ListDatabaseRolesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_database_admin.ListDatabaseRolesResponse.to_json(spanner_database_admin.ListDatabaseRolesResponse()) + req.return_value.content = return_value + + request = spanner_database_admin.ListDatabaseRolesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.ListDatabaseRolesResponse() + post_with_metadata.return_value = spanner_database_admin.ListDatabaseRolesResponse(), metadata + + client.list_database_roles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_add_split_points_rest_bad_request(request_type=spanner_database_admin.AddSplitPointsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.add_split_points(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.AddSplitPointsRequest, + dict, +]) +def test_add_split_points_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.AddSplitPointsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.add_split_points(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_database_admin.AddSplitPointsResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_add_split_points_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_add_split_points") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.AddSplitPointsRequest.pb(spanner_database_admin.AddSplitPointsRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = spanner_database_admin.AddSplitPointsResponse.to_json(spanner_database_admin.AddSplitPointsResponse())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.AddSplitPointsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = spanner_database_admin.AddSplitPointsResponse()
+ post_with_metadata.return_value = spanner_database_admin.AddSplitPointsResponse(), metadata
+
+ client.add_split_points(request, metadata=metadata)
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_create_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.CreateBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backup_schedule(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.CreateBackupScheduleRequest, + dict, +]) +def test_create_backup_schedule_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request_init["backup_schedule"] = {'name': 'name_value', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup_schedule.CreateBackupScheduleRequest.meta.fields["backup_schedule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
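+ # proto-plus message classes describe their fields via `meta.fields`,
+ # while vanilla protobuf classes expose a `DESCRIPTOR`; the hasattr
+ # check below distinguishes the two.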
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup_schedule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsad_backup_schedule.BackupSchedule)
+ assert response.name == 'name_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup_schedule") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = gsad_backup_schedule.CreateBackupScheduleRequest.pb(gsad_backup_schedule.CreateBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule())
+ req.return_value.content = return_value
+
+ request = gsad_backup_schedule.CreateBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = gsad_backup_schedule.BackupSchedule()
+ post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata
+
+ client.create_backup_schedule(request, metadata=metadata)
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_get_backup_schedule_rest_bad_request(request_type=backup_schedule.GetBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup_schedule(request) + + +@pytest.mark.parametrize("request_type", [ + backup_schedule.GetBackupScheduleRequest, + dict, +]) +def test_get_backup_schedule_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup_schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup_schedule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backup_schedule.BackupSchedule)
+ assert response.name == 'name_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup_schedule") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = backup_schedule.GetBackupScheduleRequest.pb(backup_schedule.GetBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = backup_schedule.BackupSchedule.to_json(backup_schedule.BackupSchedule())
+ req.return_value.content = return_value
+
+ request = backup_schedule.GetBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = backup_schedule.BackupSchedule()
+ post_with_metadata.return_value = backup_schedule.BackupSchedule(), metadata
+
+ client.get_backup_schedule(request, metadata=metadata)
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backup_schedule(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.UpdateBackupScheduleRequest, + dict, +]) +def test_update_backup_schedule_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} + request_init["backup_schedule"] = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup_schedule.UpdateBackupScheduleRequest.meta.fields["backup_schedule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_backup_schedule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsad_backup_schedule.BackupSchedule)
+ assert response.name == 'name_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup_schedule") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(gsad_backup_schedule.UpdateBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule())
+ req.return_value.content = return_value
+
+ request = gsad_backup_schedule.UpdateBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = gsad_backup_schedule.BackupSchedule()
+ post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata
+
+ client.update_backup_schedule(request, metadata=metadata)
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_delete_backup_schedule_rest_bad_request(request_type=backup_schedule.DeleteBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.delete_backup_schedule(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ backup_schedule.DeleteBackupScheduleRequest,
+ dict,
+])
+def test_delete_backup_schedule_rest_call_success(request_type):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ''
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.delete_backup_schedule(request)
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup_schedule") as pre:
+ pre.assert_not_called()
+ pb_message = backup_schedule.DeleteBackupScheduleRequest.pb(backup_schedule.DeleteBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ request = backup_schedule.DeleteBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.delete_backup_schedule(request, metadata=metadata)
+
+ pre.assert_called_once()
+
+
+def test_list_backup_schedules_rest_bad_request(request_type=backup_schedule.ListBackupSchedulesRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backup_schedules(request) + + +@pytest.mark.parametrize("request_type", [ + backup_schedule.ListBackupSchedulesRequest, + dict, +]) +def test_list_backup_schedules_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup_schedule.ListBackupSchedulesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_schedules(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupSchedulesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_backup_schedules_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_schedules") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = backup_schedule.ListBackupSchedulesRequest.pb(backup_schedule.ListBackupSchedulesRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = backup_schedule.ListBackupSchedulesResponse.to_json(backup_schedule.ListBackupSchedulesResponse())
+ req.return_value.content = return_value
+
+ request = backup_schedule.ListBackupSchedulesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = backup_schedule.ListBackupSchedulesResponse()
+ post_with_metadata.return_value = backup_schedule.ListBackupSchedulesResponse(), metadata
+
+ client.list_backup_schedules(request, metadata=metadata)
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_internal_update_graph_operation_rest_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ with pytest.raises(NotImplementedError) as not_implemented_error:
+ client.internal_update_graph_operation({})
+ assert (
+ "Method InternalUpdateGraphOperation is not available over REST transport"
+ in str(not_implemented_error.value)
+ )
+
+
+def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
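+ # CancelOperation and DeleteOperation return google.protobuf.Empty,
+ # which the client surfaces as None.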
+ assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
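+ # operations_pb2 messages are plain protobuf (not proto-plus), so
+ # json_format.MessageToJson is applied directly, with no .pb() conversion
+ # as in the admin-API tests above.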
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_databases_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + client.update_database(request=None) + + # Establish that the underlying stub method was called. 
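+ # Each entry in mock_calls is a (name, args, kwargs) triple; args[0] is
+ # the request message the client built, so comparing it against a
+ # default-constructed request proves that request=None is coerced into an
+ # empty request message.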
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_ddl_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + client.update_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_drop_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + client.drop_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.DropDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_ddl_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + client.get_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + client.create_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.CreateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + client.copy_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.CopyBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + client.update_backup(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backups_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_operations_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + client.list_database_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_operations_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + client.list_backup_operations(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_roles_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + client.list_database_roles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseRolesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_add_split_points_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + client.add_split_points(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.AddSplitPointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_schedule_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_schedule_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_schedule_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_backup_schedule_empty_call_rest():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup_schedule),
+ '__call__') as call:
+ client.delete_backup_schedule(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = backup_schedule.DeleteBackupScheduleRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_backup_schedules_empty_call_rest():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backup_schedules),
+ '__call__') as call:
+ client.list_backup_schedules(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = backup_schedule.ListBackupSchedulesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_internal_update_graph_operation_empty_call_rest():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.internal_update_graph_operation),
+ '__call__') as call:
+ client.internal_update_graph_operation(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest()
+
+ assert args[0] == request_msg
+
+
+def test_database_admin_rest_lro_client():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ assert isinstance(
+ client.transport,
+ transports.DatabaseAdminGrpcTransport,
+ )
+
+
+def test_database_admin_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+ transport = transports.DatabaseAdminTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ credentials_file="credentials.json"
+ )
+
+
+def test_database_admin_base_transport():
+ # Instantiate the base transport.
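+ # Patching __init__ to return None lets the abstract base transport be
+ # constructed without any credential plumbing, so each method stub can
+ # be probed for NotImplementedError below.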
+ with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DatabaseAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_databases', + 'create_database', + 'get_database', + 'update_database', + 'update_database_ddl', + 'drop_database', + 'get_database_ddl', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'create_backup', + 'copy_backup', + 'get_backup', + 'update_backup', + 'delete_backup', + 'list_backups', + 'restore_database', + 'list_database_operations', + 'list_backup_operations', + 'list_database_roles', + 'add_split_points', + 'create_backup_schedule', + 'get_backup_schedule', + 'update_backup_schedule', + 'delete_backup_schedule', + 'list_backup_schedules', + 'internal_update_graph_operation', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_database_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DatabaseAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id="octopus", + ) + + +def test_database_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DatabaseAdminTransport() + adc.assert_called_once() + + +def test_database_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DatabaseAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminGrpcAsyncIOTransport, + ], +) +def test_database_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminGrpcAsyncIOTransport, + transports.DatabaseAdminRestTransport, + ], +) +def test_database_admin_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DatabaseAdminGrpcTransport, grpc_helpers), + (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_database_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + scopes=["1", "2"], + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) +def test_database_admin_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert,
+ private_key=expected_key
+ )
+
+def test_database_admin_http_transport_client_cert_source_for_mtls():
+ cred = ga_credentials.AnonymousCredentials()
+ with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+ transports.DatabaseAdminRestTransport(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback
+ )
+ mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_database_admin_host_no_port(transport_name):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'spanner.googleapis.com:443'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://spanner.googleapis.com'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "grpc",
+ "grpc_asyncio",
+ "rest",
+])
+def test_database_admin_host_with_port(transport_name):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'),
+ transport=transport_name,
+ )
+ assert client.transport._host == (
+ 'spanner.googleapis.com:8000'
+ if transport_name in ['grpc', 'grpc_asyncio']
+ else 'https://spanner.googleapis.com:8000'
+ )
+
+@pytest.mark.parametrize("transport_name", [
+ "rest",
+])
+def test_database_admin_client_transport_session_collision(transport_name):
+ creds1 = ga_credentials.AnonymousCredentials()
+ creds2 = ga_credentials.AnonymousCredentials()
+ client1 = DatabaseAdminClient(
+ credentials=creds1,
+ transport=transport_name,
+ )
+ client2 = DatabaseAdminClient(
+ credentials=creds2,
+ transport=transport_name,
+ )
+ session1 = client1.transport.list_databases._session
+ session2 = client2.transport.list_databases._session
+ assert session1 != session2
+ session1 = client1.transport.create_database._session
+ session2 = client2.transport.create_database._session
+ assert session1 != session2
+ session1 = client1.transport.get_database._session
+ session2 = client2.transport.get_database._session
+ assert session1 != session2
+ session1 = client1.transport.update_database._session
+ session2 = client2.transport.update_database._session
+ assert session1 != session2
+ session1 = 
client1.transport.update_database_ddl._session + session2 = client2.transport.update_database_ddl._session + assert session1 != session2 + session1 = client1.transport.drop_database._session + session2 = client2.transport.drop_database._session + assert session1 != session2 + session1 = client1.transport.get_database_ddl._session + session2 = client2.transport.get_database_ddl._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.create_backup._session + session2 = client2.transport.create_backup._session + assert session1 != session2 + session1 = client1.transport.copy_backup._session + session2 = client2.transport.copy_backup._session + assert session1 != session2 + session1 = client1.transport.get_backup._session + session2 = client2.transport.get_backup._session + assert session1 != session2 + session1 = client1.transport.update_backup._session + session2 = client2.transport.update_backup._session + assert session1 != session2 + session1 = client1.transport.delete_backup._session + session2 = client2.transport.delete_backup._session + assert session1 != session2 + session1 = client1.transport.list_backups._session + session2 = client2.transport.list_backups._session + assert session1 != session2 + session1 = client1.transport.restore_database._session + session2 = client2.transport.restore_database._session + assert session1 != session2 + session1 = client1.transport.list_database_operations._session + session2 = client2.transport.list_database_operations._session + assert session1 != session2 + session1 = client1.transport.list_backup_operations._session + session2 = client2.transport.list_backup_operations._session + assert session1 != session2 + session1 = client1.transport.list_database_roles._session + session2 = client2.transport.list_database_roles._session + assert session1 != session2 + session1 = client1.transport.add_split_points._session + session2 = client2.transport.add_split_points._session + assert session1 != session2 + session1 = client1.transport.create_backup_schedule._session + session2 = client2.transport.create_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.get_backup_schedule._session + session2 = client2.transport.get_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.update_backup_schedule._session + session2 = client2.transport.update_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.delete_backup_schedule._session + session2 = client2.transport.delete_backup_schedule._session + assert session1 != session2 + session1 = client1.transport.list_backup_schedules._session + session2 = client2.transport.list_backup_schedules._session + assert session1 != session2 + session1 = client1.transport.internal_update_graph_operation._session + session2 = client2.transport.internal_update_graph_operation._session + assert session1 != session2 +def test_database_admin_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DatabaseAdminGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_database_admin_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.DatabaseAdminGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport])
+def test_database_admin_transport_channel_mtls_with_client_cert_source(
+ transport_class
+):
+ with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+ with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, 'default') as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) +def test_database_admin_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_database_admin_grpc_lro_client(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_database_admin_grpc_lro_async_client(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_backup_path(): + project = "squid" + instance = "clam" + backup = "whelk" + expected = "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) + actual = DatabaseAdminClient.backup_path(project, instance, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "octopus", + "instance": "oyster", + "backup": "nudibranch", + } + path = DatabaseAdminClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_backup_path(path) + assert expected == actual + +def test_backup_schedule_path(): + project = "cuttlefish" + instance = "mussel" + database = "winkle" + schedule = "nautilus" + expected = "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, ) + actual = DatabaseAdminClient.backup_schedule_path(project, instance, database, schedule) + assert expected == actual + + +def test_parse_backup_schedule_path(): + expected = { + "project": "scallop", + "instance": "abalone", + "database": "squid", + "schedule": "clam", + } + path = DatabaseAdminClient.backup_schedule_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_backup_schedule_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "whelk" + location = "octopus" + key_ring = "oyster" + crypto_key = "nudibranch" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + actual = DatabaseAdminClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "key_ring": "winkle", + "crypto_key": "nautilus", + } + path = DatabaseAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_crypto_key_path(path) + assert expected == actual + +def test_crypto_key_version_path(): + project = "scallop" + location = "abalone" + key_ring = "squid" + crypto_key = "clam" + crypto_key_version = "whelk" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) + actual = DatabaseAdminClient.crypto_key_version_path(project, location, key_ring, crypto_key, crypto_key_version) + assert expected == actual + + +def test_parse_crypto_key_version_path(): + expected = { + "project": "octopus", + "location": "oyster", + "key_ring": "nudibranch", + "crypto_key": "cuttlefish", + "crypto_key_version": "mussel", + } + path = DatabaseAdminClient.crypto_key_version_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_crypto_key_version_path(path) + assert expected == actual + +def test_database_path(): + project = "winkle" + instance = "nautilus" + database = "scallop" + expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + actual = DatabaseAdminClient.database_path(project, instance, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "abalone", + "instance": "squid", + "database": "clam", + } + path = DatabaseAdminClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_database_path(path) + assert expected == actual + +def test_database_role_path(): + project = "whelk" + instance = "octopus" + database = "oyster" + role = "nudibranch" + expected = "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, ) + actual = DatabaseAdminClient.database_role_path(project, instance, database, role) + assert expected == actual + + +def test_parse_database_role_path(): + expected = { + "project": "cuttlefish", + "instance": "mussel", + "database": "winkle", + "role": "nautilus", + } + path = DatabaseAdminClient.database_role_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_database_role_path(path) + assert expected == actual + +def test_instance_path(): + project = "scallop" + instance = "abalone" + expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) + actual = DatabaseAdminClient.instance_path(project, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "squid", + "instance": "clam", + } + path = DatabaseAdminClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_instance_path(path) + assert expected == actual + +def test_instance_partition_path(): + project = "whelk" + instance = "octopus" + instance_partition = "oyster" + expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) + actual = DatabaseAdminClient.instance_partition_path(project, instance, instance_partition) + assert expected == actual + + +def test_parse_instance_partition_path(): + expected = { + "project": "nudibranch", + "instance": "cuttlefish", + "instance_partition": "mussel", + } + path = DatabaseAdminClient.instance_partition_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_instance_partition_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DatabaseAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = DatabaseAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = DatabaseAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = DatabaseAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DatabaseAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = DatabaseAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = DatabaseAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = DatabaseAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DatabaseAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = DatabaseAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: + transport_class = DatabaseAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc new file mode 100644 index 0000000000..040d100fc8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/spanner_admin_instance/__init__.py + google/cloud/spanner_admin_instance/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/spanner_admin_instance/v1/.flake8 b/owl-bot-staging/spanner_admin_instance/v1/.flake8 new file mode 100644 index 0000000000..90316de214 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/.flake8 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 +exclude = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint + **/gapic/** + **/services/** + **/types/** + # Exclude Protobuf gencode + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/spanner_admin_instance/v1/LICENSE b/owl-bot-staging/spanner_admin_instance/v1/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in new file mode 100644 index 0000000000..dae249ec89 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+include README.rst LICENSE
+recursive-include google *.py *.pyi *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/owl-bot-staging/spanner_admin_instance/v1/README.rst b/owl-bot-staging/spanner_admin_instance/v1/README.rst
new file mode 100644
index 0000000000..7123886f79
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/README.rst
@@ -0,0 +1,143 @@
+Python Client for Google Cloud Spanner Admin Instance API
+==========================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Spanner Admin Instance API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
+
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+
+Examples
+^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+
+Examples
+^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+    import logging
+
+    base_logger = logging.getLogger("google")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+    import logging
+
+    base_logger = logging.getLogger("google.cloud.library_v1")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
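+
+Propagation example
+^^^^^^^^^^^^^^^^^^^
+
+A minimal sketch of the propagation behavior described in item 1 above (the
+:code:`google` logger name comes from this document; the use of
+:code:`logging.basicConfig` is an illustrative choice, not the library's own setup):
+
+.. code-block:: python
+
+    import logging
+
+    # Give the root logger a handler so propagated records are visible.
+    logging.basicConfig(level=logging.DEBUG)
+
+    # Events from "google.*" loggers do not reach the root logger by default;
+    # opt in explicitly, as described above.
+    google_logger = logging.getLogger("google")
+    google_logger.setLevel(logging.DEBUG)
+    google_logger.propagate = True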
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css new file mode 100644 index 0000000000..b0a295464b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html new file mode 100644 index 0000000000..95e9c77fcf --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+          {%- if theme_show_relbar_top|tobool %}
+          <div class="related top">
+            &nbsp;
+            {{- rellink_markup () }}
+          </div>
+          {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+          {%- if theme_show_relbar_bottom|tobool %}
+          <div class="related bottom">
+            &nbsp;
+            {{- rellink_markup () }}
+          </div>
+          {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py new file mode 100644 index 0000000000..4e0af4e74e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner-admin-instance documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner-admin-instance" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-instance", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-admin-instance-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-spanner-admin-instance.tex", + u"google-cloud-spanner-admin-instance Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-spanner-admin-instance", + "google-cloud-spanner-admin-instance Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-spanner-admin-instance", + "google-cloud-spanner-admin-instance Documentation", + author, + "google-cloud-spanner-admin-instance", + "google-cloud-spanner-admin-instance Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst new file mode 100644 index 0000000000..545d74cbb9 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst @@ -0,0 +1,10 @@ +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + spanner_admin_instance_v1/services_ + spanner_admin_instance_v1/types_ diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst new file mode 100644 index 0000000000..536d17b2ea --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. 
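+
+   A minimal sketch of that pattern (the :func:`os.fork` timing is the point;
+   the worker body, the parent resource string, and the RPC used are
+   illustrative assumptions, not prescribed by this note):
+
+   .. code-block:: python
+
+       import multiprocessing
+
+       from google.cloud import spanner_admin_instance_v1
+
+       def list_configs(parent: str) -> None:
+           # Create the client inside the child process (after the fork) so
+           # no gRPC channel state is shared across processes.
+           client = spanner_admin_instance_v1.InstanceAdminClient()
+           for config in client.list_instance_configs(parent=parent):
+               print(config.name)
+
+       if __name__ == "__main__":
+           worker = multiprocessing.Process(
+               target=list_configs, args=("projects/my-project",))
+           worker.start()
+           worker.join()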
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst new file mode 100644 index 0000000000..fe820b3fad --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst @@ -0,0 +1,10 @@ +InstanceAdmin +------------------------------- + +.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst new file mode 100644 index 0000000000..407d44cc34 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner Admin Instance v1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + instance_admin diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst new file mode 100644 index 0000000000..250cf6bf9b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Spanner Admin Instance v1 API +==================================================== + +.. automodule:: google.cloud.spanner_admin_instance_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py new file mode 100644 index 0000000000..a729ed0a13 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_admin_instance import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.spanner_admin_instance_v1.services.instance_admin.client import InstanceAdminClient +from google.cloud.spanner_admin_instance_v1.services.instance_admin.async_client import InstanceAdminAsyncClient + +from google.cloud.spanner_admin_instance_v1.types.common import OperationProgress +from google.cloud.spanner_admin_instance_v1.types.common import ReplicaSelection +from google.cloud.spanner_admin_instance_v1.types.common import FulfillmentPeriod +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import AutoscalingConfig +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import FreeInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import Instance +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstanceConfig +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstancePartition +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesResponse +from 
google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaComputeCapacity +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaInfo +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceRequest + +__all__ = ('InstanceAdminClient', + 'InstanceAdminAsyncClient', + 'OperationProgress', + 'ReplicaSelection', + 'FulfillmentPeriod', + 'AutoscalingConfig', + 'CreateInstanceConfigMetadata', + 'CreateInstanceConfigRequest', + 'CreateInstanceMetadata', + 'CreateInstancePartitionMetadata', + 'CreateInstancePartitionRequest', + 'CreateInstanceRequest', + 'DeleteInstanceConfigRequest', + 'DeleteInstancePartitionRequest', + 'DeleteInstanceRequest', + 'FreeInstanceMetadata', + 'GetInstanceConfigRequest', + 'GetInstancePartitionRequest', + 'GetInstanceRequest', + 'Instance', + 'InstanceConfig', + 'InstancePartition', + 'ListInstanceConfigOperationsRequest', + 'ListInstanceConfigOperationsResponse', + 'ListInstanceConfigsRequest', + 'ListInstanceConfigsResponse', + 'ListInstancePartitionOperationsRequest', + 'ListInstancePartitionOperationsResponse', + 'ListInstancePartitionsRequest', + 'ListInstancePartitionsResponse', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'MoveInstanceMetadata', + 'MoveInstanceRequest', + 'MoveInstanceResponse', + 'ReplicaComputeCapacity', + 'ReplicaInfo', + 'UpdateInstanceConfigMetadata', + 'UpdateInstanceConfigRequest', + 'UpdateInstanceMetadata', + 'UpdateInstancePartitionMetadata', + 'UpdateInstancePartitionRequest', + 'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed new file mode 100644 index 0000000000..915a8e55e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py new file mode 100644 index 0000000000..384e545d89 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.instance_admin import InstanceAdminClient +from .services.instance_admin import InstanceAdminAsyncClient + +from .types.common import OperationProgress +from .types.common import ReplicaSelection +from .types.common import FulfillmentPeriod +from .types.spanner_instance_admin import AutoscalingConfig +from .types.spanner_instance_admin import CreateInstanceConfigMetadata +from .types.spanner_instance_admin import CreateInstanceConfigRequest +from .types.spanner_instance_admin import CreateInstanceMetadata +from .types.spanner_instance_admin import CreateInstancePartitionMetadata +from .types.spanner_instance_admin import CreateInstancePartitionRequest +from .types.spanner_instance_admin import CreateInstanceRequest +from .types.spanner_instance_admin import DeleteInstanceConfigRequest +from .types.spanner_instance_admin import DeleteInstancePartitionRequest +from .types.spanner_instance_admin import DeleteInstanceRequest +from .types.spanner_instance_admin import FreeInstanceMetadata +from .types.spanner_instance_admin import GetInstanceConfigRequest +from .types.spanner_instance_admin import GetInstancePartitionRequest +from .types.spanner_instance_admin import GetInstanceRequest +from .types.spanner_instance_admin import Instance +from .types.spanner_instance_admin import InstanceConfig +from .types.spanner_instance_admin import InstancePartition +from .types.spanner_instance_admin import ListInstanceConfigOperationsRequest +from .types.spanner_instance_admin import ListInstanceConfigOperationsResponse +from .types.spanner_instance_admin import ListInstanceConfigsRequest +from .types.spanner_instance_admin import ListInstanceConfigsResponse +from .types.spanner_instance_admin import ListInstancePartitionOperationsRequest +from .types.spanner_instance_admin import ListInstancePartitionOperationsResponse +from .types.spanner_instance_admin import 
ListInstancePartitionsRequest +from .types.spanner_instance_admin import ListInstancePartitionsResponse +from .types.spanner_instance_admin import ListInstancesRequest +from .types.spanner_instance_admin import ListInstancesResponse +from .types.spanner_instance_admin import MoveInstanceMetadata +from .types.spanner_instance_admin import MoveInstanceRequest +from .types.spanner_instance_admin import MoveInstanceResponse +from .types.spanner_instance_admin import ReplicaComputeCapacity +from .types.spanner_instance_admin import ReplicaInfo +from .types.spanner_instance_admin import UpdateInstanceConfigMetadata +from .types.spanner_instance_admin import UpdateInstanceConfigRequest +from .types.spanner_instance_admin import UpdateInstanceMetadata +from .types.spanner_instance_admin import UpdateInstancePartitionMetadata +from .types.spanner_instance_admin import UpdateInstancePartitionRequest +from .types.spanner_instance_admin import UpdateInstanceRequest + +__all__ = ( + 'InstanceAdminAsyncClient', +'AutoscalingConfig', +'CreateInstanceConfigMetadata', +'CreateInstanceConfigRequest', +'CreateInstanceMetadata', +'CreateInstancePartitionMetadata', +'CreateInstancePartitionRequest', +'CreateInstanceRequest', +'DeleteInstanceConfigRequest', +'DeleteInstancePartitionRequest', +'DeleteInstanceRequest', +'FreeInstanceMetadata', +'FulfillmentPeriod', +'GetInstanceConfigRequest', +'GetInstancePartitionRequest', +'GetInstanceRequest', +'Instance', +'InstanceAdminClient', +'InstanceConfig', +'InstancePartition', +'ListInstanceConfigOperationsRequest', +'ListInstanceConfigOperationsResponse', +'ListInstanceConfigsRequest', +'ListInstanceConfigsResponse', +'ListInstancePartitionOperationsRequest', +'ListInstancePartitionOperationsResponse', +'ListInstancePartitionsRequest', +'ListInstancePartitionsResponse', +'ListInstancesRequest', +'ListInstancesResponse', +'MoveInstanceMetadata', +'MoveInstanceRequest', +'MoveInstanceResponse', +'OperationProgress', +'ReplicaComputeCapacity', +'ReplicaInfo', +'ReplicaSelection', +'UpdateInstanceConfigMetadata', +'UpdateInstanceConfigRequest', +'UpdateInstanceMetadata', +'UpdateInstancePartitionMetadata', +'UpdateInstancePartitionRequest', +'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json new file mode 100644 index 0000000000..60fa46718a --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json @@ -0,0 +1,343 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.spanner_admin_instance_v1", + "protoPackage": "google.spanner.admin.instance.v1", + "schema": "1.0", + "services": { + "InstanceAdmin": { + "clients": { + "grpc": { + "libraryClient": "InstanceAdminClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateInstanceConfig": { + "methods": [ + "create_instance_config" + ] + }, + "CreateInstancePartition": { + "methods": [ + "create_instance_partition" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteInstanceConfig": { + "methods": [ + "delete_instance_config" + ] + }, + "DeleteInstancePartition": { + "methods": [ + "delete_instance_partition" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { 
+ "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "GetInstancePartition": { + "methods": [ + "get_instance_partition" + ] + }, + "ListInstanceConfigOperations": { + "methods": [ + "list_instance_config_operations" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstancePartitionOperations": { + "methods": [ + "list_instance_partition_operations" + ] + }, + "ListInstancePartitions": { + "methods": [ + "list_instance_partitions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateInstancePartition": { + "methods": [ + "update_instance_partition" + ] + } + } + }, + "grpc-async": { + "libraryClient": "InstanceAdminAsyncClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateInstanceConfig": { + "methods": [ + "create_instance_config" + ] + }, + "CreateInstancePartition": { + "methods": [ + "create_instance_partition" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteInstanceConfig": { + "methods": [ + "delete_instance_config" + ] + }, + "DeleteInstancePartition": { + "methods": [ + "delete_instance_partition" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "GetInstancePartition": { + "methods": [ + "get_instance_partition" + ] + }, + "ListInstanceConfigOperations": { + "methods": [ + "list_instance_config_operations" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstancePartitionOperations": { + "methods": [ + "list_instance_partition_operations" + ] + }, + "ListInstancePartitions": { + "methods": [ + "list_instance_partitions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateInstancePartition": { + "methods": [ + "update_instance_partition" + ] + } + } + }, + "rest": { + "libraryClient": "InstanceAdminClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateInstanceConfig": { + "methods": [ + "create_instance_config" + ] + }, + "CreateInstancePartition": { + "methods": [ + "create_instance_partition" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteInstanceConfig": { + "methods": [ + "delete_instance_config" + ] + }, + "DeleteInstancePartition": { + "methods": [ + "delete_instance_partition" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "GetInstancePartition": { + "methods": [ + "get_instance_partition" + ] + }, + 
"ListInstanceConfigOperations": { + "methods": [ + "list_instance_config_operations" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstancePartitionOperations": { + "methods": [ + "list_instance_partition_operations" + ] + }, + "ListInstancePartitions": { + "methods": [ + "list_instance_partitions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateInstancePartition": { + "methods": [ + "update_instance_partition" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed new file mode 100644 index 0000000000..915a8e55e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py new file mode 100644 index 0000000000..cbf94b283c --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py new file mode 100644 index 0000000000..72ef4ab187 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import InstanceAdminClient +from .async_client import InstanceAdminAsyncClient + +__all__ = ( + 'InstanceAdminClient', + 'InstanceAdminAsyncClient', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py new file mode 100644 index 0000000000..de12cfc17e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -0,0 +1,3468 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +import uuid + +from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport +from .client import InstanceAdminClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class InstanceAdminAsyncClient: + """Cloud Spanner Instance Admin API + + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + """ + + _client: InstanceAdminClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = InstanceAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = InstanceAdminClient._DEFAULT_UNIVERSE + + instance_path = staticmethod(InstanceAdminClient.instance_path) + parse_instance_path = staticmethod(InstanceAdminClient.parse_instance_path) + instance_config_path = staticmethod(InstanceAdminClient.instance_config_path) + parse_instance_config_path = staticmethod(InstanceAdminClient.parse_instance_config_path) + instance_partition_path = staticmethod(InstanceAdminClient.instance_partition_path) + parse_instance_partition_path = staticmethod(InstanceAdminClient.parse_instance_partition_path) + common_billing_account_path = staticmethod(InstanceAdminClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(InstanceAdminClient.parse_common_billing_account_path) + common_folder_path = staticmethod(InstanceAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(InstanceAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(InstanceAdminClient.common_organization_path) + parse_common_organization_path = staticmethod(InstanceAdminClient.parse_common_organization_path) + common_project_path = staticmethod(InstanceAdminClient.common_project_path) + parse_common_project_path = staticmethod(InstanceAdminClient.parse_common_project_path) + common_location_path = staticmethod(InstanceAdminClient.common_location_path) + parse_common_location_path = staticmethod(InstanceAdminClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminAsyncClient: The constructed client. + """ + return InstanceAdminClient.from_service_account_info.__func__(InstanceAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceAdminAsyncClient: The constructed client. + """ + return InstanceAdminClient.from_service_account_file.__func__(InstanceAdminAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return InstanceAdminClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> InstanceAdminTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            InstanceAdminTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = InstanceAdminClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the instance admin async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the InstanceAdminTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = InstanceAdminClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.spanner.admin.instance_v1.InstanceAdminAsyncClient`.",
+                extra = {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
+                } if hasattr(self._client._transport, "_credentials") else {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "credentialsType": None,
+                }
+            )
+
+    async def list_instance_configs(self,
+            request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListInstanceConfigsAsyncPager:
+        r"""Lists the supported instance configurations for a
+        given project.
+        Returns both Google-managed configurations and
+        user-managed configurations.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_list_instance_configs():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_instance_configs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]]):
+                The request object. The request for
+                [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+            parent (:class:`str`):
+                Required. The name of the project for which a list of
+                supported instance configurations is requested. Values
+                are of the form ``projects/<project>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager:
+                The response for
+                [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest):
+            request = spanner_instance_admin.ListInstanceConfigsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_configs]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListInstanceConfigsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_instance_config(self,
+            request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> spanner_instance_admin.InstanceConfig:
+        r"""Gets information about a particular instance
+        configuration.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_get_instance_config():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.GetInstanceConfigRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_instance_config(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]]):
+                The request object. The request for
+                [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].
+            name (:class:`str`):
+                Required. The name of the requested instance
+                configuration. Values are of the form
+                ``projects/<project>/instanceConfigs/<instance_config>``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.types.InstanceConfig:
+                A possible configuration for a Cloud
+                Spanner instance. Configurations define
+                the geographic placement of nodes and
+                their replication.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest):
+            request = spanner_instance_admin.GetInstanceConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_config]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
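+        # The routing header assembled above reaches the server as an
+        # "x-goog-request-params" metadata entry with URL-encoded values; for
+        # example (illustrative), name="projects/p/instanceConfigs/c" is sent
+        # as ("x-goog-request-params", "name=projects%2Fp%2FinstanceConfigs%2Fc").
+
+        # Done; return the response.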
+        return response
+
+    async def create_instance_config(self,
+            request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            instance_config: Optional[spanner_instance_admin.InstanceConfig] = None,
+            instance_config_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Creates an instance configuration and begins preparing it to be
+        used. The returned long-running operation can be used to track
+        the progress of preparing the new instance configuration. The
+        instance configuration name is assigned by the caller. If the
+        named instance configuration already exists,
+        ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``.
+
+        Immediately after the request returns:
+
+        - The instance configuration is readable via the API, with all
+          requested attributes. The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field is set to true. Its state is ``CREATING``.
+
+        While the operation is pending:
+
+        - Cancelling the operation renders the instance configuration
+          immediately unreadable via the API.
+        - Except for deleting the creating resource, all other attempts
+          to modify the instance configuration are rejected.
+
+        Upon completion of the returned operation:
+
+        - Instances can be created using the instance configuration.
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field becomes false. Its state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track creation of the instance configuration. The
+        metadata field type is
+        [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.create``
+        permission on the resource
+        [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_create_instance_config():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.CreateInstanceConfigRequest(
+                    parent="parent_value",
+                    instance_config_id="instance_config_id_value",
+                )
+
+                # Make the request
+                operation = client.create_instance_config(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]]):
+                The request object. The request for
+                [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
+            parent (:class:`str`):
+                Required. The name of the project in which to create the
+                instance configuration. Values are of the form
+                ``projects/<project>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`):
+                Required. The ``InstanceConfig`` proto of the
+                configuration to create. ``instance_config.name`` must
+                be ``<parent>/instanceConfigs/<instance_config_id>``.
+                ``instance_config.base_config`` must be a Google-managed
+                configuration name, e.g. ``<parent>/instanceConfigs/us-east1``,
+                ``<parent>/instanceConfigs/nam3``.
+
+                This corresponds to the ``instance_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_config_id (:class:`str`):
+                Required. The ID of the instance configuration to
+                create. Valid identifiers are of the form
+                ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and
+                64 characters in length. The ``custom-`` prefix is
+                required to avoid name conflicts with Google-managed
+                configurations.
+
+                This corresponds to the ``instance_config_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
+                define the geographic placement of nodes and their
+                replication.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, instance_config, instance_config_id]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest):
+            request = spanner_instance_admin.CreateInstanceConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if instance_config is not None:
+            request.instance_config = instance_config
+        if instance_config_id is not None:
+            request.instance_config_id = instance_config_id
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_config]
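+        # The wrapped method carries the transport's default retry/timeout
+        # policy; passing retry=... or timeout=... to this call (for example,
+        # an AsyncRetry with custom backoff) overrides those defaults for this
+        # invocation only.
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.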
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            spanner_instance_admin.InstanceConfig,
+            metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_instance_config(self,
+            request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None,
+            *,
+            instance_config: Optional[spanner_instance_admin.InstanceConfig] = None,
+            update_mask: Optional[field_mask_pb2.FieldMask] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Updates an instance configuration. The returned long-running
+        operation can be used to track the progress of updating the
+        instance. If the named instance configuration does not exist,
+        returns ``NOT_FOUND``.
+
+        Only user-managed configurations can be updated.
+
+        Immediately after the request returns:
+
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field is set to true.
+
+        While the operation is pending:
+
+        - Cancelling the operation sets its metadata's
+          [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
+          The operation is guaranteed to succeed at undoing all changes,
+          after which point it terminates with a ``CANCELLED`` status.
+        - All other attempts to modify the instance configuration are
+          rejected.
+        - Reading the instance configuration via the API continues to
+          give the pre-request values.
+
+        Upon completion of the returned operation:
+
+        - Creating instances using the instance configuration uses the
+          new values.
+        - The new values of the instance configuration are readable via
+          the API.
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field becomes false.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track the instance configuration modification.
+        The metadata field type is
+        [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.update``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_update_instance_config():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
+                )
+
+                # Make the request
+                operation = client.update_instance_config(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]]):
+                The request object. The request for
+                [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
+            instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`):
+                Required. The user instance configuration to update,
+                which must always include the instance configuration
+                name. Otherwise, only fields mentioned in
+                [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
+                need be included. To prevent conflicts of concurrent
+                updates,
+                [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
+                can be used.
+
+                This corresponds to the ``instance_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. A mask specifying which fields in
+                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+                should be updated. The field mask must always be
+                specified; this prevents any future fields in
+                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+                from being erased accidentally by clients that do not
+                know about them. Only display_name and labels can be
+                updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
+                define the geographic placement of nodes and their
+                replication.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
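+        # For example (illustrative), either of these invocations is valid:
+        #   await client.update_instance_config(request=request)
+        #   await client.update_instance_config(instance_config=cfg, update_mask=mask)
+        # but mixing the two styles raises ValueError:
+        #   await client.update_instance_config(request=request, update_mask=mask)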
+ flattened_params = [instance_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest): + request = spanner_instance_admin.UpdateInstanceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance_config is not None: + request.instance_config = instance_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance_config.name", request.instance_config.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.InstanceConfig, + metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance_config(self, + request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the instance configuration. Deletion is only allowed + when no instances are using the configuration. If any instances + are using the configuration, returns ``FAILED_PRECONDITION``. + + Only user-managed configurations can be deleted. + + Authorization requires ``spanner.instanceConfigs.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstanceConfig.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_delete_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_config(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]]): + The request object. 
+                The request for
+                [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig].
+            name (:class:`str`):
+                Required. The name of the instance configuration to be
+                deleted. Values are of the form
+                ``projects/<project>/instanceConfigs/<instance_config>``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest):
+            request = spanner_instance_admin.DeleteInstanceConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_config]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def list_instance_config_operations(self,
+            request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListInstanceConfigOperationsAsyncPager:
+        r"""Lists the user-managed instance configuration long-running
+        operations in the given project. An instance configuration
+        operation has a name of the form
+        ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
+        The long-running operation metadata field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations. Operations returned are ordered by
+        ``operation.metadata.value.start_time`` in descending order
+        starting from the most recently started operation.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_list_instance_config_operations():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_instance_config_operations(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]]):
+                The request object. The request for
+                [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
+            parent (:class:`str`):
+                Required. The project of the instance configuration
+                operations. Values are of the form
+                ``projects/<project>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager:
+                The response for
+                [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest):
+            request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_config_operations]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
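+        # Universe-domain validation (below) checks that the credentials'
+        # universe matches the client's configured universe (default
+        # "googleapis.com") and raises ValueError on a mismatch, before any
+        # request is sent.
+
+        # Validate the universe domain.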
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListInstanceConfigOperationsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_instances(self,
+            request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListInstancesAsyncPager:
+        r"""Lists all instances in the given project.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_list_instances():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.ListInstancesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_instances(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]]):
+                The request object. The request for
+                [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+            parent (:class:`str`):
+                Required. The name of the project for which a list of
+                instances is requested. Values are of the form
+                ``projects/<project>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager:
+                The response for
+                [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.ListInstancesRequest):
+            request = spanner_instance_admin.ListInstancesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListInstancesAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_instance_partitions(self,
+            request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListInstancePartitionsAsyncPager:
+        r"""Lists all instance partitions for the given instance.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_list_instance_partitions():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_instance_partitions(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]]):
+                The request object. The request for
+                [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
+            parent (:class:`str`):
+                Required. The instance whose instance partitions should
+                be listed. Values are of the form
+                ``projects/<project>/instances/<instance>``. Use
+                ``{instance} = '-'`` to list instance partitions for all
+                instances in a project, e.g.,
+                ``projects/myproject/instances/-``.
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager: + The response for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest): + request = spanner_instance_admin.ListInstancePartitionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancePartitionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance(self, + request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.Instance: + r"""Gets information about a particular instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_get_instance():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.GetInstanceRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = await client.get_instance(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]]):
+                The request object. The request for
+                [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance].
+            name (:class:`str`):
+                Required. The name of the requested instance. Values are
+                of the form ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.types.Instance:
+                An isolated set of Cloud Spanner
+                resources on which databases can be
+                hosted.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.GetInstanceRequest):
+            request = spanner_instance_admin.GetInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
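+        # (Illustrative) The returned Instance message reflects current state,
+        # e.g. response.state is CREATING or READY, and capacity appears in
+        # response.node_count or response.processing_units.
+
+        # Done; return the response.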
+        return response
+
+    async def create_instance(self,
+            request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            instance_id: Optional[str] = None,
+            instance: Optional[spanner_instance_admin.Instance] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Creates an instance and begins preparing it to begin serving.
+        The returned long-running operation can be used to track the
+        progress of preparing the new instance. The instance name is
+        assigned by the caller. If the named instance already exists,
+        ``CreateInstance`` returns ``ALREADY_EXISTS``.
+
+        Immediately upon completion of this request:
+
+        - The instance is readable via the API, with all requested
+          attributes but no allocated resources. Its state is
+          ``CREATING``.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation renders the instance immediately
+          unreadable via the API.
+        - The instance can be deleted.
+        - All other attempts to modify the instance are rejected.
+
+        Upon completion of the returned operation:
+
+        - Billing for all successfully-allocated resources begins (some
+          types may have lower than the requested levels).
+        - Databases can be created in the instance.
+        - The instance's allocated resource levels are readable via the
+          API.
+        - The instance's state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_name>/operations/<operation_id>`` and can be
+        used to track creation of the instance. The metadata field type
+        is
+        [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_create_instance():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                instance = spanner_admin_instance_v1.Instance()
+                instance.name = "name_value"
+                instance.config = "config_value"
+                instance.display_name = "display_name_value"
+
+                request = spanner_admin_instance_v1.CreateInstanceRequest(
+                    parent="parent_value",
+                    instance_id="instance_id_value",
+                    instance=instance,
+                )
+
+                # Make the request
+                operation = client.create_instance(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = await (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]]):
+                The request object. The request for
+                [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].
+            parent (:class:`str`):
+                Required. The name of the project in which to create the
+                instance. Values are of the form ``projects/<project>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_id (:class:`str`):
+                Required. The ID of the instance to create. Valid
+                identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``
+                and must be between 2 and 64 characters in length.
+
+                This corresponds to the ``instance_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`):
+                Required. The instance to create. The name may be
+                omitted, but if specified must be
+                ``<parent>/instances/<instance_id>``.
+
+                This corresponds to the ``instance`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.spanner_admin_instance_v1.types.Instance`
+                An isolated set of Cloud Spanner resources on which
+                databases can be hosted.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent, instance_id, instance]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.CreateInstanceRequest):
+            request = spanner_instance_admin.CreateInstanceRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if instance_id is not None:
+            request.instance_id = instance_id
+        if instance is not None:
+            request.instance = instance
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation_async.from_gapic(
+            response,
+            self._client._transport.operations_client,
+            spanner_instance_admin.Instance,
+            metadata_type=spanner_instance_admin.CreateInstanceMetadata,
+        )
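+        # The AsyncOperation returned below can be awaited for the final
+        # Instance; for example (illustrative):
+        #   op = await client.create_instance(request=request)
+        #   instance = await op.result(timeout=600)
+        # op.metadata carries CreateInstanceMetadata with progress timestamps.
+
+        # Done; return the response.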
+ return response
+
+ async def update_instance(self,
+ request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None,
+ *,
+ instance: Optional[spanner_instance_admin.Instance] = None,
+ field_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Updates an instance, and begins allocating or releasing
+ resources as requested. The returned long-running operation can
+ be used to track the progress of updating the instance. If the
+ named instance does not exist, returns ``NOT_FOUND``.
+
+ Immediately upon completion of this request:
+
+ - For resource types for which a decrease in the instance's
+ allocation has been requested, billing is based on the
+ newly-requested level.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance are rejected.
+ - Reading the instance via the API continues to give the
+ pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance's tables.
+ - The instance's new resource levels are readable via the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track the instance modification. The metadata field type
+ is
+ [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ Authorization requires ``spanner.instances.update`` permission
+ on the resource
+ [name][google.spanner.admin.instance.v1.Instance.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_update_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ instance = spanner_admin_instance_v1.Instance()
+ instance.name = "name_value"
+ instance.config = "config_value"
+ instance.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.UpdateInstanceRequest(
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.update_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]]):
+ The request object.
The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): + Required. The instance to update, which must always + include the instance name. Otherwise, only fields + mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] + need be included. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which fields in + [Instance][google.spanner.admin.instance.v1.Instance] + should be updated. The field mask must always be + specified; this prevents any future fields in + [Instance][google.spanner.admin.instance.v1.Instance] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [instance, field_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): + request = spanner_instance_admin.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
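+ # NOTE (editorial, illustrative sketch; field names are placeholders):
+ # the future produced below resolves to the updated Instance, e.g.:
+ #
+ #   operation = await client.update_instance(
+ #       instance=instance,
+ #       field_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+ #   )
+ #   updated = await operation.result()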
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ spanner_instance_admin.Instance,
+ metadata_type=spanner_instance_admin.UpdateInstanceMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_instance(self,
+ request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> None:
+ r"""Deletes an instance.
+
+ Immediately upon completion of the request:
+
+ - Billing ceases for all of the instance's reserved resources.
+
+ Soon afterward:
+
+ - The instance and *all of its databases* immediately and
+ irrevocably disappear from the API. All data in the databases
+ is permanently deleted.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_delete_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.DeleteInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ await client.delete_instance(request=request)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]]):
+ The request object. The request for
+ [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].
+ name (:class:`str`):
+ Required. The name of the instance to be deleted. Values
+ are of the form
+ ``projects/<project>/instances/<instance>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest):
+ request = spanner_instance_admin.DeleteInstanceRequest(request)
+
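+ # NOTE (editorial, illustrative sketch; the resource name is a
+ # placeholder): deletion is irreversible, and the minimal flattened
+ # call is simply:
+ #
+ #   await client.delete_instance(
+ #       name="projects/my-project/instances/my-instance",
+ #   )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.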
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on an instance resource. Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. 
+
+ For some types of Google Cloud resources, a binding
+ can also specify a condition, which is a logical
+ expression that allows access to a resource only if
+ the expression evaluates to true. A condition can add
+ constraints based on attributes of the request, the
+ resource, or both. To learn which resources support
+ conditions in their IAM policies, see the [IAM
+ documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+ **JSON example:**
+
+ ::
+
+    {
+      "bindings": [
+        {
+          "role": "roles/resourcemanager.organizationAdmin",
+          "members": [
+            "user:mike@example.com",
+            "group:admins@example.com",
+            "domain:google.com",
+            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+          ]
+        },
+        {
+          "role": "roles/resourcemanager.organizationViewer",
+          "members": ["user:eve@example.com"],
+          "condition": {
+            "title": "expirable access",
+            "description": "Does not grant access after Sep 2020",
+            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+          }
+        }
+      ],
+      "etag": "BwWWja0YfJA=",
+      "version": 3
+    }
+
+ **YAML example:**
+
+ ::
+
+    bindings:
+    - members:
+      - user:mike@example.com
+      - group:admins@example.com
+      - domain:google.com
+      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+      role: roles/resourcemanager.organizationAdmin
+    - members:
+      - user:eve@example.com
+      role: roles/resourcemanager.organizationViewer
+      condition:
+        title: expirable access
+        description: Does not grant access after Sep 2020
+        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+    etag: BwWWja0YfJA=
+    version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.SetIamPolicyRequest(resource=resource)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_iam_policy(self,
+ request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the access control policy for an instance resource. Returns
+ an empty policy if an instance exists but does not have a policy
+ set.
+ + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+ **JSON example:**
+
+ ::
+
+    {
+      "bindings": [
+        {
+          "role": "roles/resourcemanager.organizationAdmin",
+          "members": [
+            "user:mike@example.com",
+            "group:admins@example.com",
+            "domain:google.com",
+            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+          ]
+        },
+        {
+          "role": "roles/resourcemanager.organizationViewer",
+          "members": ["user:eve@example.com"],
+          "condition": {
+            "title": "expirable access",
+            "description": "Does not grant access after Sep 2020",
+            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+          }
+        }
+      ],
+      "etag": "BwWWja0YfJA=",
+      "version": 3
+    }
+
+ **YAML example:**
+
+ ::
+
+    bindings:
+    - members:
+      - user:mike@example.com
+      - group:admins@example.com
+      - domain:google.com
+      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+      role: roles/resourcemanager.organizationAdmin
+    - members:
+      - user:eve@example.com
+      role: roles/resourcemanager.organizationViewer
+      condition:
+        title: expirable access
+        description: Does not grant access after Sep 2020
+        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+    etag: BwWWja0YfJA=
+    version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def test_iam_permissions(self,
+ request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ permissions: Optional[MutableSequence[str]] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Returns permissions that the caller has on the specified
+ instance resource.
+
+ Attempting this RPC on a non-existent Cloud Spanner instance
+ resource will result in a NOT_FOUND error if the user has
+ ``spanner.instances.list`` permission on the containing Google
+ Cloud Project. Otherwise returns an empty set of permissions.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+ from google.iam.v1 import iam_policy_pb2 # type: ignore
+
+ async def sample_test_iam_permissions():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = iam_policy_pb2.TestIamPermissionsRequest(
+ resource="resource_value",
+ permissions=['permissions_value1', 'permissions_value2'],
+ )
+
+ # Make the request
+ response = await client.test_iam_permissions(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]):
+ The request object. Request message for ``TestIamPermissions`` method.
+ resource (:class:`str`):
+ REQUIRED: The resource for which the
+ policy detail is being requested. See
+ the operation documentation for the
+ appropriate value for this field.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (:class:`MutableSequence[str]`):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource, permissions]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
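+ # NOTE (editorial, illustrative sketch; resource name and permission
+ # are placeholders): the flattened form returns only the subset of the
+ # requested permissions that the caller actually holds, e.g.:
+ #
+ #   response = await client.test_iam_permissions(
+ #       resource="projects/my-project/instances/my-instance",
+ #       permissions=["spanner.instances.get"],
+ #   )
+ #   granted = list(response.permissions)
+
+ # Validate the universe domain.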
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance_partition(self, + request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.InstancePartition: + r"""Gets information about a particular instance + partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_get_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstancePartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]]): + The request object. The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + name (:class:`str`): + Required. The name of the requested instance partition. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.types.InstancePartition: + An isolated set of Cloud Spanner + resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
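+ # NOTE (editorial, illustrative sketch; the partition name is a
+ # placeholder): a plain dict is also accepted and coerced below, e.g.:
+ #
+ #   response = await client.get_instance_partition(
+ #       request={"name": "projects/my-project/instances/my-instance"
+ #                        "/instancePartitions/my-partition"},
+ #   )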
+ if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest):
+ request = spanner_instance_admin.GetInstancePartitionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_partition]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def create_instance_partition(self,
+ request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ instance_partition: Optional[spanner_instance_admin.InstancePartition] = None,
+ instance_partition_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates an instance partition and begins preparing it to be
+ used. The returned long-running operation can be used to track
+ the progress of preparing the new instance partition. The
+ instance partition name is assigned by the caller. If the named
+ instance partition already exists, ``CreateInstancePartition``
+ returns ``ALREADY_EXISTS``.
+
+ Immediately upon completion of this request:
+
+ - The instance partition is readable via the API, with all
+ requested attributes but no allocated resources. Its state is
+ ``CREATING``.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation renders the instance partition
+ immediately unreadable via the API.
+ - The instance partition can be deleted.
+ - All other attempts to modify the instance partition are
+ rejected.
+
+ Upon completion of the returned operation:
+
+ - Billing for all successfully-allocated resources begins (some
+ types may have lower than the requested levels).
+ - Databases can start using this instance partition.
+ - The instance partition's allocated resource levels are
+ readable via the API.
+ - The instance partition's state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_partition_name>/operations/<operation_id>``
+ and can be used to track creation of the instance partition. The
+ metadata field type is
+ [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata].
+ The response field type is
+ [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+ if successful.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_create_instance_partition():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ instance_partition = spanner_admin_instance_v1.InstancePartition()
+ instance_partition.node_count = 1070
+ instance_partition.name = "name_value"
+ instance_partition.config = "config_value"
+ instance_partition.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.CreateInstancePartitionRequest(
+ parent="parent_value",
+ instance_partition_id="instance_partition_id_value",
+ instance_partition=instance_partition,
+ )
+
+ # Make the request
+ operation = client.create_instance_partition(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]]):
+ The request object. The request for
+ [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition].
+ parent (:class:`str`):
+ Required. The name of the instance in which to create
+ the instance partition. Values are of the form
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`):
+ Required. The instance partition to create. The
+ instance_partition.name may be omitted, but if specified
+ must be
+ ``<parent>/instancePartitions/<instance_partition_id>``.
+
+ This corresponds to the ``instance_partition`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_partition_id (:class:`str`):
+ Required. The ID of the instance partition to create.
+ Valid identifiers are of the form
+ ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64
+ characters in length.
+
+ This corresponds to the ``instance_partition_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define
+ placements on.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
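+ # NOTE (editorial, illustrative sketch; values are placeholders):
+ # `request` and the flattened fields are mutually exclusive, so a call
+ # like the following raises ValueError below:
+ #
+ #   await client.create_instance_partition(
+ #       request=spanner_admin_instance_v1.CreateInstancePartitionRequest(),
+ #       parent="projects/my-project/instances/my-instance",
+ #   )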
+ flattened_params = [parent, instance_partition, instance_partition_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): + request = spanner_instance_admin.CreateInstancePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_partition is not None: + request.instance_partition = instance_partition + if instance_partition_id is not None: + request.instance_partition_id = instance_partition_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.InstancePartition, + metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance_partition(self, + request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an existing instance partition. Requires that the + instance partition is not used by any database or backup and is + not the default instance partition of an instance. + + Authorization requires ``spanner.instancePartitions.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_partition(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]]): + The request object. 
The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + name (:class:`str`): + Required. The name of the instance partition to be + deleted. Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): + request = spanner_instance_admin.DeleteInstancePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_instance_partition(self, + request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, + *, + instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, + field_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an instance partition, and begins allocating or + releasing resources as requested. The returned long-running + operation can be used to track the progress of updating the + instance partition. If the named instance partition does not + exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance + partition's allocation has been requested, billing is based on + the newly-requested level. 
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance partition are
+ rejected.
+ - Reading the instance partition via the API continues to give
+ the pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance partition's tables.
+ - The instance partition's new resource levels are readable via
+ the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_partition_name>/operations/<operation_id>``
+ and can be used to track the instance partition modification.
+ The metadata field type is
+ [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata].
+ The response field type is
+ [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+ if successful.
+
+ Authorization requires ``spanner.instancePartitions.update``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_update_instance_partition():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ instance_partition = spanner_admin_instance_v1.InstancePartition()
+ instance_partition.node_count = 1070
+ instance_partition.name = "name_value"
+ instance_partition.config = "config_value"
+ instance_partition.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
+ instance_partition=instance_partition,
+ )
+
+ # Make the request
+ operation = client.update_instance_partition(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]]):
+ The request object. The request for
+ [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition].
+ instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`):
+ Required. The instance partition to update, which must
+ always include the instance partition name. Otherwise,
+ only fields mentioned in
+ [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask]
+ need be included.
+
+ This corresponds to the ``instance_partition`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+ Required.
A mask specifying which fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + should be updated. The field mask must always be + specified; this prevents any future fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [instance_partition, field_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest): + request = spanner_instance_admin.UpdateInstancePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance_partition is not None: + request.instance_partition = instance_partition + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance_partition.name", request.instance_partition.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.InstancePartition, + metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata, + ) + + # Done; return the response. 
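+ # NOTE (editorial, illustrative sketch; field names are placeholders):
+ # the AsyncOperation returned below resolves to the updated
+ # InstancePartition, e.g.:
+ #
+ #   operation = await client.update_instance_partition(
+ #       instance_partition=instance_partition,
+ #       field_mask=field_mask_pb2.FieldMask(paths=["node_count"]),
+ #   )
+ #   partition = await operation.result()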
+ return response
+
+ async def list_instance_partition_operations(self,
+ request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstancePartitionOperationsAsyncPager:
+ r"""Lists instance partition long-running operations in the given
+ instance. An instance partition operation has a name of the form
+ ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``.
+ The long-running operation metadata field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.start_time`` in descending order
+ starting from the most recently started operation.
+
+ Authorization requires
+ ``spanner.instancePartitionOperations.list`` permission on the
+ resource
+ [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_list_instance_partition_operations():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instance_partition_operations(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]]):
+ The request object. The request for
+ [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+ parent (:class:`str`):
+ Required. The parent instance of the instance partition
+ operations. Values are of the form
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager:
+ The response for
+ [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest):
+ request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partition_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListInstancePartitionOperationsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def move_instance(self,
+ request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Moves an instance to the target instance configuration. You can
+ use the returned long-running operation to track the progress of
+ moving the instance.
+
+ ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance
+ meets any of the following criteria:
+
+ - Is undergoing a move to a different instance configuration
+ - Has backups
+ - Has an ongoing update
+ - Contains any CMEK-enabled databases
+ - Is a free trial instance
+
+ While the operation is pending:
+
+ - All other attempts to modify the instance, including changes
+ to its compute capacity, are rejected.
+
+ - The following database and backup admin operations are
+ rejected:
+
+ - ``DatabaseAdmin.CreateDatabase``
+ - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if
+ default_leader is specified in the request.)
+ - ``DatabaseAdmin.RestoreDatabase``
+ - ``DatabaseAdmin.CreateBackup``
+ - ``DatabaseAdmin.CopyBackup``
+
+ - Both the source and target instance configurations are subject
+ to hourly compute and storage charges.
+
+ - The instance might experience higher read-write latencies and
+ a higher transaction abort rate. However, moving an instance
+ doesn't cause any downtime.
+
+ The returned long-running operation has a name of the format
+ ``<instance>/operations/<operation_id>`` and can be used to
+ track the move instance operation.
+ [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful. Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
+ Cancellation is not immediate because it involves moving any
+ data previously moved to the target instance configuration back
+ to the original instance configuration. You can use this
+ operation to track the progress of the cancellation. Upon
+ successful completion of the cancellation, the operation
+ terminates with ``CANCELLED`` status.
+
+ If not cancelled, upon completion of the returned operation:
+
+ - The instance successfully moves to the target instance
+ configuration.
+ - You are billed for compute and storage in target instance
+ configuration.
+
+ Authorization requires the ``spanner.instances.update``
+ permission on the resource
+ [instance][google.spanner.admin.instance.v1.Instance].
+
+ For more details, see `Move an
+ instance <https://cloud.google.com/spanner/docs/move-instance>`__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_move_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.MoveInstanceRequest(
+ name="name_value",
+ target_config="target_config_value",
+ )
+
+ # Make the request
+ operation = client.move_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ # Awaiting the call yields the AsyncOperation; its result() is
+ # itself a coroutine and must also be awaited.
+ response = await (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]]):
+ The request object. The request for
+ [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for
+ [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.MoveInstanceRequest):
+ request = spanner_instance_admin.MoveInstanceRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
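+ # `_wrapped_methods` maps each raw transport method to a callable that
+ # already carries that RPC's default retry/timeout policy and error
+ # translation, so only per-call overrides are passed when invoking it.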
+ rpc = self._client._transport._wrapped_methods[self._client._transport.move_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.MoveInstanceResponse, + metadata_type=spanner_instance_admin.MoveInstanceMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. 
If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "InstanceAdminAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "InstanceAdminAsyncClient", +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py new file mode 100644 index 0000000000..6f504cdc37 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -0,0 +1,3849 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
+import os
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+import uuid
+import warnings
+
+from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+import google.protobuf
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+
+try:
+ from google.api_core import client_logging # type: ignore
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
+from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers
+from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import InstanceAdminGrpcTransport
+from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport
+from .transports.rest import InstanceAdminRestTransport
+
+
+class InstanceAdminClientMeta(type):
+ """Metaclass for the InstanceAdmin client.
+
+ This provides class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+ _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]]
+ _transport_registry["grpc"] = InstanceAdminGrpcTransport
+ _transport_registry["grpc_asyncio"] = InstanceAdminGrpcAsyncIOTransport
+ _transport_registry["rest"] = InstanceAdminRestTransport
+
+ def get_transport_class(cls,
+ label: Optional[str] = None,
+ ) -> Type[InstanceAdminTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
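+ # Because `_transport_registry` is an OrderedDict populated above, this
+ # resolves to the first registration, "grpc".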
+ return next(iter(cls._transport_registry.values()))
+
+
+class InstanceAdminClient(metaclass=InstanceAdminClientMeta):
+ """Cloud Spanner Instance Admin API
+
+ The Cloud Spanner Instance Admin API can be used to create,
+ delete, modify and list instances. Instances are dedicated Cloud
+ Spanner serving and storage resources to be used by Cloud
+ Spanner databases.
+
+ Each instance has a "configuration", which dictates where the
+ serving resources for the Cloud Spanner instance are located
+ (e.g., US-central, Europe). Configurations are created by Google
+ based on resource availability.
+
+ Cloud Spanner billing is based on the instances that exist and
+ their sizes. After an instance exists, there are no additional
+ per-database or per-operation charges for use of the instance
+ (though there may be additional network bandwidth charges).
+ Instances offer isolation: problems with databases in one
+ instance will not affect other instances. However, within an
+ instance databases can affect each other. For example, if one
+ database in an instance receives a lot of requests and consumes
+ most of the instance resources, fewer resources are available
+ for other databases in that instance, and their performance may
+ suffer.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = "spanner.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}"
+ _DEFAULT_UNIVERSE = "googleapis.com"
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ InstanceAdminClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ InstanceAdminClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(
+ filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> InstanceAdminTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ InstanceAdminTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def instance_path(project: str,instance: str,) -> str:
+ """Returns a fully-qualified instance string."""
+ return "projects/{project}/instances/{instance}".format(project=project, instance=instance, )
+
+ @staticmethod
+ def parse_instance_path(path: str) -> Dict[str,str]:
+ """Parses an instance path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def instance_config_path(project: str,instance_config: str,) -> str:
+ """Returns a fully-qualified instance_config string."""
+ return "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, )
+
+ @staticmethod
+ def parse_instance_config_path(path: str) -> Dict[str,str]:
+ """Parses an instance_config path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instanceConfigs/(?P<instance_config>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str:
+ """Returns a fully-qualified instance_partition string."""
+ return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, )
+
+ @staticmethod
+ def parse_instance_partition_path(path: str) -> Dict[str,str]:
+ """Parses an instance_partition path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/instancePartitions/(?P<instance_partition>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str, ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str, ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder, )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str,str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str, ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization, )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str,str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str, ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(project=project, )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str,str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str, ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str,str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+ default client cert source exists, use the default one; otherwise the client cert
+ source is None.
+
+ The API endpoint is determined in the following order:
+ (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+
+ warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+ DeprecationWarning)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+ # Figure out the client cert source to use.
+ client_cert_source = None
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ client_cert_source = client_options.client_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+
+ # Figure out which api endpoint to use.
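+ # Precedence: an explicit `api_endpoint` override wins; otherwise the mTLS
+ # endpoint is used when forced ("always") or auto-selected because a client
+ # certificate is present; otherwise the default endpoint applies.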
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+ api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = cls.DEFAULT_ENDPOINT
+
+ return api_endpoint, client_cert_source
+
+ @staticmethod
+ def _read_environment_variables():
+ """Returns the environment variables used by the client.
+
+ Returns:
+ Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+ GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+ Raises:
+ ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+ any of ["true", "false"].
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+ is not any of ["auto", "never", "always"].
+ """
+ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+ universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+ return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+ @staticmethod
+ def _get_client_cert_source(provided_cert_source, use_cert_flag):
+ """Return the client cert source to be used by the client.
+
+ Args:
+ provided_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): The client certificate source provided.
+ use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+ Returns:
+ Optional[Callable[[], Tuple[bytes, bytes]]]: The client cert source to be used by the client.
+ """
+ client_cert_source = None
+ if use_cert_flag:
+ if provided_cert_source:
+ client_cert_source = provided_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+ return client_cert_source
+
+ @staticmethod
+ def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
+ """Return the API endpoint used by the client.
+
+ Args:
+ api_override (str): The API endpoint override. If specified, this is always
+ the return value of this function and the other arguments are not used.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): The client certificate source used by the client.
+ universe_domain (str): The universe domain used by the client.
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+ Possible values are "always", "auto", or "never".
+
+ Returns:
+ str: The API endpoint to be used by the client.
+ """
+ if api_override is not None:
+ api_endpoint = api_override
+ elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+ _default_universe = InstanceAdminClient._DEFAULT_UNIVERSE
+ if universe_domain != _default_universe:
+ raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
+ api_endpoint = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
+ return api_endpoint
+
+ @staticmethod
+ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
+ """Return the universe domain used by the client.
+ + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = InstanceAdminClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstanceAdminTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which can have one of the following values:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide a client certificate for mTLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that the ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client_options = client_options
+ if isinstance(self._client_options, dict):
+ self._client_options = client_options_lib.from_dict(self._client_options)
+ if self._client_options is None:
+ self._client_options = client_options_lib.ClientOptions()
+ self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+ universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+ self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = InstanceAdminClient._read_environment_variables()
+ self._client_cert_source = InstanceAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+ self._universe_domain = InstanceAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+ self._api_endpoint = None # updated below, depending on `transport`
+
+ # Initialize the universe domain validation.
+ self._is_universe_domain_valid = False
+
+ if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
+
+ api_key_value = getattr(self._client_options, "api_key", None)
+ if api_key_value and credentials:
+ raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ transport_provided = isinstance(transport, InstanceAdminTransport)
+ if transport_provided:
+ # transport is an InstanceAdminTransport instance.
+ if credentials or self._client_options.credentials_file or api_key_value:
+ raise ValueError("When providing a transport instance, "
+ "provide its credentials directly.")
+ if self._client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ ) + self._transport = cast(InstanceAdminTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + InstanceAdminClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[InstanceAdminTransport], Callable[..., InstanceAdminTransport]] = ( + InstanceAdminClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstanceAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner.admin.instance_v1.InstanceAdminClient`.", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "credentialsType": None, + } + ) + + def list_instance_configs(self, + request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstanceConfigsPager: + r"""Lists the supported instance configurations for a + given project. + Returns both Google-managed configurations and + user-managed configurations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_list_instance_configs():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instance_configs(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]):
+ The request object. The request for
+ [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+ parent (str):
+ Required. The name of the project for which a list of
+ supported instance configurations is requested. Values
+ are of the form ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager:
+ The response for
+ [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest):
+ request = spanner_instance_admin.ListInstanceConfigsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_instance_configs]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
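+ # The pager keeps the wrapped RPC together with the original request and
+ # the per-call retry/timeout/metadata, re-issuing the call whenever
+ # iteration crosses a page boundary, so callers never handle
+ # `next_page_token` themselves.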
+ response = pagers.ListInstanceConfigsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_instance_config(self,
+ request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> spanner_instance_admin.InstanceConfig:
+ r"""Gets information about a particular instance
+ configuration.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_get_instance_config():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.GetInstanceConfigRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_instance_config(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]):
+ The request object. The request for
+ [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].
+ name (str):
+ Required. The name of the requested instance
+ configuration. Values are of the form
+ ``projects/<project>/instanceConfigs/<instance_config>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.types.InstanceConfig:
+ A possible configuration for a Cloud
+ Spanner instance. Configurations define
+ the geographic placement of nodes and
+ their replication.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest):
+ request = spanner_instance_admin.GetInstanceConfigRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
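+ # (When `request` was given as a plain dict, the proto-plus constructor
+ # above has already coerced it into a typed message, so the attribute
+ # assignment below is always safe.)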
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_instance_config]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_instance_config(self,
+ request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ instance_config: Optional[spanner_instance_admin.InstanceConfig] = None,
+ instance_config_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Creates an instance configuration and begins preparing it to be
+ used. The returned long-running operation can be used to track
+ the progress of preparing the new instance configuration. The
+ instance configuration name is assigned by the caller. If the
+ named instance configuration already exists,
+ ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``.
+
+ Immediately after the request returns:
+
+ - The instance configuration is readable via the API, with all
+ requested attributes. The instance configuration's
+ [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+ field is set to true. Its state is ``CREATING``.
+
+ While the operation is pending:
+
+ - Cancelling the operation renders the instance configuration
+ immediately unreadable via the API.
+ - Except for deleting the creating resource, all other attempts
+ to modify the instance configuration are rejected.
+
+ Upon completion of the returned operation:
+
+ - Instances can be created using the instance configuration.
+ - The instance configuration's
+ [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+ field becomes false. Its state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_config_name>/operations/<operation_id>`` and
+ can be used to track creation of the instance configuration. The
+ metadata field type is
+ [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
+ The response field type is
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+ if successful.
+
+ Authorization requires ``spanner.instanceConfigs.create``
+ permission on the resource
+ [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_create_instance_config():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.CreateInstanceConfigRequest(
+ parent="parent_value",
+ instance_config_id="instance_config_id_value",
+ )
+
+ # Make the request
+ operation = client.create_instance_config(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]):
+ The request object. The request for
+ [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
+ parent (str):
+ Required. The name of the project in which to create the
+ instance configuration. Values are of the form
+ ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+ Required. The ``InstanceConfig`` proto of the
+ configuration to create. ``instance_config.name`` must
+ be ``<parent>/instanceConfigs/<instance_config_id>``.
+ ``instance_config.base_config`` must be a Google-managed
+ configuration name, e.g. <parent>/instanceConfigs/us-east1,
+ <parent>/instanceConfigs/nam3.
+
+ This corresponds to the ``instance_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_config_id (str):
+ Required. The ID of the instance configuration to
+ create. Valid identifiers are of the form
+ ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and
+ 64 characters in length. The ``custom-`` prefix is
+ required to avoid name conflicts with Google-managed
+ configurations.
+
+ This corresponds to the ``instance_config_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
+ define the geographic placement of nodes and their
+ replication.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
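+ # The flattened convenience arguments (`parent`, `instance_config`,
+ # `instance_config_id`) are an alternative to building `request` yourself;
+ # supplying both raises ValueError below instead of silently merging them.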
+ flattened_params = [parent, instance_config, instance_config_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest): + request = spanner_instance_admin.CreateInstanceConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_config is not None: + request.instance_config = instance_config + if instance_config_id is not None: + request.instance_config_id = instance_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.InstanceConfig, + metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata, + ) + + # Done; return the response. + return response + + def update_instance_config(self, + request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None, + *, + instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an instance configuration. The returned long-running + operation can be used to track the progress of updating the + instance. If the named instance configuration does not exist, + returns ``NOT_FOUND``. + + Only user-managed configurations can be updated. + + Immediately after the request returns: + + - The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field is set to true. + + While the operation is pending: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. + The operation is guaranteed to succeed at undoing all changes, + after which point it terminates with a ``CANCELLED`` status. + - All other attempts to modify the instance configuration are + rejected. + - Reading the instance configuration via the API continues to + give the pre-request values. + + Upon completion of the returned operation: + + - Creating instances using the instance configuration uses the + new values. + - The new values of the instance configuration are readable via + the API. + - The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field becomes false. 
+
+ The returned long-running operation will have a name of the
+ format ``<instance_config_name>/operations/<operation_id>`` and
+ can be used to track the instance configuration modification.
+ The metadata field type is
+ [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
+ The response field type is
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+ if successful.
+
+ Authorization requires ``spanner.instanceConfigs.update``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_update_instance_config():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
+ )
+
+ # Make the request
+ operation = client.update_instance_config(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]):
+ The request object. The request for
+ [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
+ instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+ Required. The user instance configuration to update,
+ which must always include the instance configuration
+ name. Otherwise, only fields mentioned in
+ [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
+ need be included. To prevent conflicts of concurrent
+ updates,
+ [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
+ can be used.
+
+ This corresponds to the ``instance_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Required. A mask specifying which fields in
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+ should be updated. The field mask must always be
+ specified; this prevents any future fields in
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+ from being erased accidentally by clients that do not
+ know about them. Only display_name and labels can be
+ updated.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+ + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations + define the geographic placement of nodes and their + replication. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [instance_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest): + request = spanner_instance_admin.UpdateInstanceConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance_config is not None: + request.instance_config = instance_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance_config.name", request.instance_config.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.InstanceConfig, + metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata, + ) + + # Done; return the response. + return response + + def delete_instance_config(self, + request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the instance configuration. Deletion is only allowed + when no instances are using the configuration. If any instances + are using the configuration, returns ``FAILED_PRECONDITION``. + + Only user-managed configurations can be deleted. + + Authorization requires ``spanner.instanceConfigs.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstanceConfig.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_delete_instance_config():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.DeleteInstanceConfigRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ client.delete_instance_config(request=request)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]):
+ The request object. The request for
+ [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig].
+ name (str):
+ Required. The name of the instance configuration to be
+ deleted. Values are of the form
+ ``projects/<project>/instanceConfigs/<instance_config>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest):
+ request = spanner_instance_admin.DeleteInstanceConfigRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_instance_config]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ def list_instance_config_operations(self,
+ request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstanceConfigOperationsPager:
+ r"""Lists the user-managed instance configuration long-running
+ operations in the given project. An instance configuration
+ operation has a name of the form
+ ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
+ The long-running operation metadata field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.start_time`` in descending order
+ starting from the most recently started operation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_list_instance_config_operations():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instance_config_operations(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]):
+ The request object. The request for
+ [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
+ parent (str):
+ Required. The project of the instance configuration
+ operations. Values are of the form
+ ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager:
+ The response for
+ [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest):
+ request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
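+ # (A plain dict passed as ``request`` is accepted as well: the proto-plus
+ # constructor above coerces a mapping such as {"parent": "projects/my-project"},
+ # a placeholder name, into a ListInstanceConfigOperationsRequest.)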
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_instance_config_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListInstanceConfigOperationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_instances(self,
+ request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstancesPager:
+ r"""Lists all instances in the given project.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_list_instances():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstancesRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instances(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]):
+ The request object. The request for
+ [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+ parent (str):
+ Required. The name of the project for which a list of
+ instances is requested. Values are of the form
+ ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager:
+ The response for
+ [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.ListInstancesRequest):
+ request = spanner_instance_admin.ListInstancesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_instances]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListInstancesPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_instance_partitions(self,
+ request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstancePartitionsPager:
+ r"""Lists all instance partitions for the given instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_list_instance_partitions():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instance_partitions(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]):
+ The request object. The request for
+ [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
+ parent (str):
+ Required. The instance whose instance partitions should
+ be listed. Values are of the form
+ ``projects/<project>/instances/<instance>``.
Use + ``{instance} = '-'`` to list instance partitions for all + Instances in a project, e.g., + ``projects/myproject/instances/-``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager: + The response for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest): + request = spanner_instance_admin.ListInstancePartitionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instance_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancePartitionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance(self, + request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.Instance: + r"""Gets information about a particular instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_get_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.GetInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_instance(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]):
+ The request object. The request for
+ [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance].
+ name (str):
+ Required. The name of the requested instance. Values are
+ of the form ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.types.Instance:
+ An isolated set of Cloud Spanner
+ resources on which databases can be
+ hosted.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.GetInstanceRequest):
+ request = spanner_instance_admin.GetInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_instance(self,
+ request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ instance_id: Optional[str] = None,
+ instance: Optional[spanner_instance_admin.Instance] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Creates an instance and begins preparing it to begin serving.
+ The returned long-running operation can be used to track the
+ progress of preparing the new instance. The instance name is
+ assigned by the caller. If the named instance already exists,
+ ``CreateInstance`` returns ``ALREADY_EXISTS``.
+
+ Immediately upon completion of this request:
+
+ - The instance is readable via the API, with all requested
+ attributes but no allocated resources. Its state is
+ ``CREATING``.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation renders the instance immediately
+ unreadable via the API.
+ - The instance can be deleted.
+ - All other attempts to modify the instance are rejected.
+
+ Upon completion of the returned operation:
+
+ - Billing for all successfully-allocated resources begins (some
+ types may have lower than the requested levels).
+ - Databases can be created in the instance.
+ - The instance's allocated resource levels are readable via the
+ API.
+ - The instance's state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track creation of the instance. The metadata field type
+ is
+ [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_create_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ instance = spanner_admin_instance_v1.Instance()
+ instance.name = "name_value"
+ instance.config = "config_value"
+ instance.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.CreateInstanceRequest(
+ parent="parent_value",
+ instance_id="instance_id_value",
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.create_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]):
+ The request object. The request for
+ [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].
+ parent (str):
+ Required. The name of the project in which to create the
+ instance. Values are of the form ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_id (str):
+ Required.
The ID of the instance to create. Valid
+ identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``
+ and must be between 2 and 64 characters in length.
+
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (google.cloud.spanner_admin_instance_v1.types.Instance):
+ Required. The instance to create. The name may be
+ omitted, but if specified must be
+ ``<parent>/instances/<instance_id>``.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.spanner_admin_instance_v1.types.Instance`
+ An isolated set of Cloud Spanner resources on which
+ databases can be hosted.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, instance_id, instance]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.CreateInstanceRequest):
+ request = spanner_instance_admin.CreateInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if instance_id is not None:
+ request.instance_id = instance_id
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ spanner_instance_admin.Instance,
+ metadata_type=spanner_instance_admin.CreateInstanceMetadata,
+ )
+
+ # Done; return the response.
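+ # What the caller receives (illustrative sketch; the 300-second timeout
+ # is an assumed value, not a library default):
+ #   operation = client.create_instance(request=request)
+ #   instance = operation.result(timeout=300)  # blocks until the LRO resolves
+ #   progress = operation.metadata             # CreateInstanceMetadata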
+ return response
+
+ def update_instance(self,
+ request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None,
+ *,
+ instance: Optional[spanner_instance_admin.Instance] = None,
+ field_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Updates an instance, and begins allocating or releasing
+ resources as requested. The returned long-running operation can
+ be used to track the progress of updating the instance. If the
+ named instance does not exist, returns ``NOT_FOUND``.
+
+ Immediately upon completion of this request:
+
+ - For resource types for which a decrease in the instance's
+ allocation has been requested, billing is based on the
+ newly-requested level.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance are rejected.
+ - Reading the instance via the API continues to give the
+ pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance's tables.
+ - The instance's new resource levels are readable via the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track the instance modification. The metadata field type
+ is
+ [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ Authorization requires ``spanner.instances.update`` permission
+ on the resource
+ [name][google.spanner.admin.instance.v1.Instance.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_update_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ instance = spanner_admin_instance_v1.Instance()
+ instance.name = "name_value"
+ instance.config = "config_value"
+ instance.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.UpdateInstanceRequest(
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.update_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]):
+ The request object.
The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + Required. The instance to update, which must always + include the instance name. Otherwise, only fields + mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] + need be included. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [Instance][google.spanner.admin.instance.v1.Instance] + should be updated. The field mask must always be + specified; this prevents any future fields in + [Instance][google.spanner.admin.instance.v1.Instance] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [instance, field_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): + request = spanner_instance_admin.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
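+ # ``operation.from_gapic`` (from google.api_core) turns the raw
+ # ``google.longrunning.Operation`` into a future: ``result()`` unpacks the
+ # operation's response field as an ``Instance``, and ``metadata`` is decoded
+ # using the ``metadata_type`` given below.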
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ spanner_instance_admin.Instance,
+ metadata_type=spanner_instance_admin.UpdateInstanceMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_instance(self,
+ request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> None:
+ r"""Deletes an instance.
+
+ Immediately upon completion of the request:
+
+ - Billing ceases for all of the instance's reserved resources.
+
+ Soon afterward:
+
+ - The instance and *all of its databases* immediately and
+ irrevocably disappear from the API. All data in the databases
+ is permanently deleted.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_delete_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.DeleteInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ client.delete_instance(request=request)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]):
+ The request object. The request for
+ [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].
+ name (str):
+ Required. The name of the instance to be deleted. Values
+ are of the form
+ ``projects/<project>/instances/<instance>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest):
+ request = spanner_instance_admin.DeleteInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
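+ # (``_wrapped_methods`` is populated by the transport's
+ # ``_prep_wrapped_messages``, which applies ``gapic_v1.method.wrap_method``
+ # so that ``retry=gapic_v1.method.DEFAULT`` and the default timeout resolve
+ # to the service-configured values.)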
+ rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on an instance resource. Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that the caller has on the specified + instance resource. + + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            def sample_test_iam_permissions():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.TestIamPermissionsRequest(
+                    resource="resource_value",
+                    permissions=['permissions_value1', 'permissions_value2'],
+                )
+
+                # Make the request
+                response = client.test_iam_permissions(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
+                The request object. Request message for ``TestIamPermissions`` method.
+            resource (str):
+                REQUIRED: The resource for which the
+                policy detail is being requested. See
+                the operation documentation for the
+                appropriate value for this field.
+
+                This corresponds to the ``resource`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            permissions (MutableSequence[str]):
+                The set of permissions to check for the ``resource``.
+                Permissions with wildcards (such as '*' or 'storage.*')
+                are not allowed. For more information see `IAM
+                Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+                This corresponds to the ``permissions`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for TestIamPermissions method.
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [resource, permissions]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        if isinstance(request, dict):
+            # - The request isn't a proto-plus wrapped type,
+            #   so it must be constructed via keyword expansion.
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+        elif not request:
+            # Null request, just make one.
+            request = iam_policy_pb2.TestIamPermissionsRequest()
+            if resource is not None:
+                request.resource = resource
+            if permissions:
+                request.permissions.extend(permissions)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Validate the universe domain.
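+        # (The check below compares the client's configured universe domain,
+        # e.g. "googleapis.com", against the universe domain of the active
+        # credentials and raises ValueError if they disagree.)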
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance_partition(self, + request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.InstancePartition: + r"""Gets information about a particular instance + partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_get_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstancePartitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]): + The request object. The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + name (str): + Required. The name of the requested instance partition. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.types.InstancePartition: + An isolated set of Cloud Spanner + resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest): + request = spanner_instance_admin.GetInstancePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_instance_partition]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_instance_partition(self,
+            request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            instance_partition: Optional[spanner_instance_admin.InstancePartition] = None,
+            instance_partition_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Creates an instance partition and begins preparing it to be
+        used. The returned long-running operation can be used to track
+        the progress of preparing the new instance partition. The
+        instance partition name is assigned by the caller. If the named
+        instance partition already exists, ``CreateInstancePartition``
+        returns ``ALREADY_EXISTS``.
+
+        Immediately upon completion of this request:
+
+        - The instance partition is readable via the API, with all
+          requested attributes but no allocated resources. Its state is
+          ``CREATING``.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation renders the instance partition
+          immediately unreadable via the API.
+        - The instance partition can be deleted.
+        - All other attempts to modify the instance partition are
+          rejected.
+
+        Upon completion of the returned operation:
+
+        - Billing for all successfully-allocated resources begins (some
+          types may have lower than the requested levels).
+        - Databases can start using this instance partition.
+        - The instance partition's allocated resource levels are
+          readable via the API.
+        - The instance partition's state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_partition_name>/operations/<operation_id>``
+        and can be used to track creation of the instance partition. The
+        metadata field type is
+        [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata].
+        The response field type is
+        [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+        if successful.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            def sample_create_instance_partition():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                instance_partition = spanner_admin_instance_v1.InstancePartition()
+                instance_partition.node_count = 1070
+                instance_partition.name = "name_value"
+                instance_partition.config = "config_value"
+                instance_partition.display_name = "display_name_value"
+
+                request = spanner_admin_instance_v1.CreateInstancePartitionRequest(
+                    parent="parent_value",
+                    instance_partition_id="instance_partition_id_value",
+                    instance_partition=instance_partition,
+                )
+
+                # Make the request
+                operation = client.create_instance_partition(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]):
+                The request object. The request for
+                [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition].
+            parent (str):
+                Required. The name of the instance in which to create
+                the instance partition. Values are of the form
+                ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition):
+                Required. The instance partition to create. The
+                instance_partition.name may be omitted, but if specified
+                must be
+                ``<parent>/instancePartitions/<instance_partition_id>``.
+
+                This corresponds to the ``instance_partition`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_partition_id (str):
+                Required. The ID of the instance partition to create.
+                Valid identifiers are of the form
+                ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64
+                characters in length.
+
+                This corresponds to the ``instance_partition_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`:
+                an isolated set of Cloud Spanner resources that databases can
+                define placements on.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+ flattened_params = [parent, instance_partition, instance_partition_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): + request = spanner_instance_admin.CreateInstancePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_partition is not None: + request.instance_partition = instance_partition + if instance_partition_id is not None: + request.instance_partition_id = instance_partition_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.InstancePartition, + metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, + ) + + # Done; return the response. + return response + + def delete_instance_partition(self, + request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an existing instance partition. Requires that the + instance partition is not used by any database or backup and is + not the default instance partition of an instance. + + Authorization requires ``spanner.instancePartitions.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_instance_partition(request=request) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]): + The request object. 
The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + name (str): + Required. The name of the instance partition to be + deleted. Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): + request = spanner_instance_admin.DeleteInstancePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_instance_partition(self, + request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, + *, + instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, + field_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an instance partition, and begins allocating or + releasing resources as requested. The returned long-running + operation can be used to track the progress of updating the + instance partition. If the named instance partition does not + exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance + partition's allocation has been requested, billing is based on + the newly-requested level. + + Until completion of the returned operation: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], + and begins restoring resources to their pre-request values. 
+          The operation is guaranteed to succeed at undoing all resource
+          changes, after which point it terminates with a ``CANCELLED``
+          status.
+        - All other attempts to modify the instance partition are
+          rejected.
+        - Reading the instance partition via the API continues to give
+          the pre-request resource levels.
+
+        Upon completion of the returned operation:
+
+        - Billing begins for all successfully-allocated resources (some
+          types may have lower than the requested levels).
+        - All newly-reserved resources are available for serving the
+          instance partition's tables.
+        - The instance partition's new resource levels are readable via
+          the API.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_partition_name>/operations/<operation_id>``
+        and can be used to track the instance partition modification.
+        The metadata field type is
+        [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata].
+        The response field type is
+        [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+        if successful.
+
+        Authorization requires ``spanner.instancePartitions.update``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            def sample_update_instance_partition():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                instance_partition = spanner_admin_instance_v1.InstancePartition()
+                instance_partition.node_count = 1070
+                instance_partition.name = "name_value"
+                instance_partition.config = "config_value"
+                instance_partition.display_name = "display_name_value"
+
+                request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
+                    instance_partition=instance_partition,
+                )
+
+                # Make the request
+                operation = client.update_instance_partition(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]):
+                The request object. The request for
+                [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition].
+            instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition):
+                Required. The instance partition to update, which must
+                always include the instance partition name. Otherwise,
+                only fields mentioned in
+                [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask]
+                need be included.
+
+                This corresponds to the ``instance_partition`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            field_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. A mask specifying which fields in
+                [InstancePartition][google.spanner.admin.instance.v1.InstancePartition]
+                should be updated.
+                The field mask must always be
+                specified; this prevents any future fields in
+                [InstancePartition][google.spanner.admin.instance.v1.InstancePartition]
+                from being erased accidentally by clients that do not
+                know about them.
+
+                This corresponds to the ``field_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`:
+                an isolated set of Cloud Spanner resources that databases can
+                define placements on.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [instance_partition, field_mask]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest):
+            request = spanner_instance_admin.UpdateInstancePartitionRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if instance_partition is not None:
+            request.instance_partition = instance_partition
+        if field_mask is not None:
+            request.field_mask = field_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_instance_partition]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("instance_partition.name", request.instance_partition.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            spanner_instance_admin.InstancePartition,
+            metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_instance_partition_operations(self,
+            request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListInstancePartitionOperationsPager:
+        r"""Lists instance partition long-running operations in the given
+        instance.
+        An instance partition operation has a name of the form
+        ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``.
+        The long-running operation metadata field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations. Operations returned are ordered by
+        ``operation.metadata.value.start_time`` in descending order
+        starting from the most recently started operation.
+
+        Authorization requires
+        ``spanner.instancePartitionOperations.list`` permission on the
+        resource
+        [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            def sample_list_instance_partition_operations():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_instance_partition_operations(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]):
+                The request object. The request for
+                [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+            parent (str):
+                Required. The parent instance of the instance partition
+                operations. Values are of the form
+                ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager:
+                The response for
+                [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
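+        # (Mutual exclusion in practice: calling
+        #     client.list_instance_partition_operations(request=req, parent="...")
+        # with both arguments set raises the ValueError below; pass either a
+        # request object or flattened fields, never both.)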
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest):
+            request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_instance_partition_operations]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListInstancePartitionOperationsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def move_instance(self,
+            request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Moves an instance to the target instance configuration. You can
+        use the returned long-running operation to track the progress of
+        moving the instance.
+
+        ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance
+        meets any of the following criteria:
+
+        - Is undergoing a move to a different instance configuration
+        - Has backups
+        - Has an ongoing update
+        - Contains any CMEK-enabled databases
+        - Is a free trial instance
+
+        While the operation is pending:
+
+        - All other attempts to modify the instance, including changes
+          to its compute capacity, are rejected.
+
+        - The following database and backup admin operations are
+          rejected:
+
+          - ``DatabaseAdmin.CreateDatabase``
+          - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if
+            default_leader is specified in the request.)
+          - ``DatabaseAdmin.RestoreDatabase``
+          - ``DatabaseAdmin.CreateBackup``
+          - ``DatabaseAdmin.CopyBackup``
+
+        - Both the source and target instance configurations are subject
+          to hourly compute and storage charges.
+
+        - The instance might experience higher read-write latencies and
+          a higher transaction abort rate. However, moving an instance
+          doesn't cause any downtime.
+
+        The returned long-running operation has a name of the format
+        ``<instance_name>/operations/<operation_id>`` and can be used to
+        track the move instance operation. The metadata field type is
+        [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
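+
+        For illustration, the returned future can be polled or cancelled
+        from Python; a minimal sketch (the ``request`` is built as in the
+        full sample further below):
+
+        .. code-block:: python
+
+            operation = client.move_instance(request=request)
+            if not operation.done():
+                # Best-effort cancellation; the notes below describe how
+                # the service unwinds a cancelled move.
+                operation.cancel()
+            # Block until the move (or its cancellation) resolves; raises
+            # on failure or cancellation.
+            response = operation.result()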
+
+        Cancelling the operation sets its metadata's
+        [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
+        Cancellation is not immediate because it involves moving any
+        data previously moved to the target instance configuration back
+        to the original instance configuration. You can use this
+        operation to track the progress of the cancellation. Upon
+        successful completion of the cancellation, the operation
+        terminates with ``CANCELLED`` status.
+
+        If not cancelled, upon completion of the returned operation:
+
+        - The instance successfully moves to the target instance
+          configuration.
+        - You are billed for compute and storage in target instance
+          configuration.
+
+        Authorization requires the ``spanner.instances.update``
+        permission on the resource
+        [instance][google.spanner.admin.instance.v1.Instance].
+
+        For more details, see `Move an
+        instance <https://cloud.google.com/spanner/docs/move-instance>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            def sample_move_instance():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.MoveInstanceRequest(
+                    name="name_value",
+                    target_config="target_config_value",
+                )
+
+                # Make the request
+                operation = client.move_instance(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]):
+                The request object. The request for
+                [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse`:
+                the response for
+                [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.MoveInstanceRequest):
+            request = spanner_instance_admin.MoveInstanceRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.move_instance]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.MoveInstanceResponse, + metadata_type=spanner_instance_admin.MoveInstanceMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "InstanceAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "InstanceAdminClient", +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py new file mode 100644 index 0000000000..bde9847337 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py @@ -0,0 +1,723 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.longrunning import operations_pb2 # type: ignore + + +class ListInstanceConfigsPager: + """A pager for iterating through ``list_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instance_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstanceConfigs`` requests and continue to iterate + through the ``instance_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstanceConfigsResponse], + request: spanner_instance_admin.ListInstanceConfigsRequest, + response: spanner_instance_admin.ListInstanceConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
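+
+        For illustration, the pager is usually obtained from
+        ``client.list_instance_configs`` and consumed by iteration; a
+        minimal sketch (the project name is a hypothetical placeholder):
+
+        .. code-block:: python
+
+            pager = client.list_instance_configs(parent="projects/my-project")
+            for config in pager:
+                # Items are InstanceConfig messages; additional pages are
+                # fetched transparently as iteration proceeds.
+                print(config.name)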
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: + for page in self.pages: + yield from page.instance_configs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstanceConfigsAsyncPager: + """A pager for iterating through ``list_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instance_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstanceConfigs`` requests and continue to iterate + through the ``instance_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]], + request: spanner_instance_admin.ListInstanceConfigsRequest, + response: spanner_instance_admin.ListInstanceConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.instance_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstanceConfigOperationsPager: + """A pager for iterating through ``list_instance_config_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstanceConfigOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstanceConfigOperationsResponse], + request: spanner_instance_admin.ListInstanceConfigOperationsRequest, + response: spanner_instance_admin.ListInstanceConfigOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstanceConfigOperationsAsyncPager: + """A pager for iterating through ``list_instance_config_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstanceConfigOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]], + request: spanner_instance_admin.ListInstanceConfigOperationsRequest, + response: spanner_instance_admin.ListInstanceConfigOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstancesResponse], + request: spanner_instance_admin.ListInstancesRequest, + response: spanner_instance_admin.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstancesResponse]], + request: spanner_instance_admin.ListInstancesRequest, + response: spanner_instance_admin.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionsPager: + """A pager for iterating through ``list_instance_partitions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instance_partitions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstancePartitions`` requests and continue to iterate + through the ``instance_partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstancePartitionsResponse], + request: spanner_instance_admin.ListInstancePartitionsRequest, + response: spanner_instance_admin.ListInstancePartitionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.InstancePartition]: + for page in self.pages: + yield from page.instance_partitions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionsAsyncPager: + """A pager for iterating through ``list_instance_partitions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instance_partitions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstancePartitions`` requests and continue to iterate + through the ``instance_partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]], + request: spanner_instance_admin.ListInstancePartitionsRequest, + response: spanner_instance_admin.ListInstancePartitionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstancePartition]: + async def async_generator(): + async for page in self.pages: + for response in page.instance_partitions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionOperationsPager: + """A pager for iterating through ``list_instance_partition_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstancePartitionOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstancePartitionOperationsResponse], + request: spanner_instance_admin.ListInstancePartitionOperationsRequest, + response: spanner_instance_admin.ListInstancePartitionOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionOperationsAsyncPager: + """A pager for iterating through ``list_instance_partition_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstancePartitionOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]], + request: spanner_instance_admin.ListInstancePartitionOperationsRequest, + response: spanner_instance_admin.ListInstancePartitionOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst new file mode 100644 index 0000000000..762ac0c765 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`InstanceAdminTransport` is the ABC for all transports. +- public child `InstanceAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `InstanceAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseInstanceAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `InstanceAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py new file mode 100644 index 0000000000..1892e6a551 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceAdminTransport +from .grpc import InstanceAdminGrpcTransport +from .grpc_asyncio import InstanceAdminGrpcAsyncIOTransport +from .rest import InstanceAdminRestTransport +from .rest import InstanceAdminRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] +_transport_registry['grpc'] = InstanceAdminGrpcTransport +_transport_registry['grpc_asyncio'] = InstanceAdminGrpcAsyncIOTransport +_transport_registry['rest'] = InstanceAdminRestTransport + +__all__ = ( + 'InstanceAdminTransport', + 'InstanceAdminGrpcTransport', + 'InstanceAdminGrpcAsyncIOTransport', + 'InstanceAdminRestTransport', + 'InstanceAdminRestInterceptor', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py new file mode 100644 index 0000000000..cac35e8288 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -0,0 +1,567 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
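
The ``_transport_registry`` above is what lets callers select a transport by name rather than by class; the client resolves the string through this mapping. A small sketch (construction details are illustrative):

    # Hedged sketch: the `transport` string is resolved through the
    # OrderedDict registry keyed by 'grpc', 'grpc_asyncio', and 'rest'.
    from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient

    rest_client = InstanceAdminClient(transport="rest")
    grpc_client = InstanceAdminClient(transport="grpc")  # the sync default
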
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InstanceAdminTransport(abc.ABC): + """Abstract transport class for InstanceAdmin.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
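
Because ``credentials_file`` is deprecated by this change, the forward-compatible pattern is to load the credentials up front and pass the object. A hedged sketch (the key path and scope are assumptions):

    # Hedged sketch of the replacement for the deprecated
    # `credentials_file` argument: load the credentials explicitly.
    import google.auth
    from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient

    credentials, _ = google.auth.load_credentials_from_file(
        "/path/to/service-account.json",  # hypothetical key file
        scopes=["https://www.googleapis.com/auth/spanner.admin"],
    )
    client = InstanceAdminClient(credentials=credentials)
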
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_instance_configs: gapic_v1.method.wrap_method( + self.list_instance_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_instance_config: gapic_v1.method.wrap_method( + self.get_instance_config, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_instance_config: gapic_v1.method.wrap_method( + self.create_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_config: gapic_v1.method.wrap_method( + self.update_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_config: gapic_v1.method.wrap_method( + self.delete_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_config_operations: gapic_v1.method.wrap_method( + self.list_instance_config_operations, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_instance_partitions: gapic_v1.method.wrap_method( + self.list_instance_partitions, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), 
+ self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.get_instance_partition: gapic_v1.method.wrap_method( + self.get_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.create_instance_partition: gapic_v1.method.wrap_method( + self.create_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_partition: gapic_v1.method.wrap_method( + self.delete_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_partition: gapic_v1.method.wrap_method( + self.update_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_partition_operations: gapic_v1.method.wrap_method( + self.list_instance_partition_operations, + default_timeout=None, + client_info=client_info, + ), + self.move_instance: gapic_v1.method.wrap_method( + self.move_instance, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
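
``_prep_wrapped_methods`` above bakes in per-RPC defaults: several read-style RPCs retry on ``DeadlineExceeded``/``ServiceUnavailable`` with exponential backoff (1.0 s initial, x1.3 growth, capped at 32.0 s) under a 3600 s overall deadline, while most others get only a default timeout or none. Both knobs can be overridden per call; a hedged sketch (client and path are assumptions):

    # Hedged sketch: overriding the wrapped-method defaults on one call.
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    custom_retry = retries.Retry(
        initial=1.0,     # first backoff, seconds
        maximum=32.0,    # backoff ceiling
        multiplier=1.3,  # growth factor between attempts
        timeout=600.0,   # overall budget, instead of the 3600 s default
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
    )

    configs = client.list_instance_configs(
        parent="projects/my-project",  # hypothetical resource path
        retry=custom_retry,
        timeout=60.0,  # per-attempt timeout
    )
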
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + Union[ + spanner_instance_admin.ListInstanceConfigsResponse, + Awaitable[spanner_instance_admin.ListInstanceConfigsResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + Union[ + spanner_instance_admin.InstanceConfig, + Awaitable[spanner_instance_admin.InstanceConfig] + ]]: + raise NotImplementedError() + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance_config(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceConfigRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance_config(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceConfigRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_instance_config_operations(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigOperationsRequest], + Union[ + spanner_instance_admin.ListInstanceConfigOperationsResponse, + Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + Union[ + spanner_instance_admin.ListInstancesResponse, + Awaitable[spanner_instance_admin.ListInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + Union[ + spanner_instance_admin.ListInstancePartitionsResponse, + Awaitable[spanner_instance_admin.ListInstancePartitionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + Union[ + spanner_instance_admin.Instance, + Awaitable[spanner_instance_admin.Instance] + ]]: + raise NotImplementedError() + + @property + def create_instance(self) -> Callable[ + [spanner_instance_admin.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + 
[iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance_partition(self) -> Callable[ + [spanner_instance_admin.GetInstancePartitionRequest], + Union[ + spanner_instance_admin.InstancePartition, + Awaitable[spanner_instance_admin.InstancePartition] + ]]: + raise NotImplementedError() + + @property + def create_instance_partition(self) -> Callable[ + [spanner_instance_admin.CreateInstancePartitionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance_partition(self) -> Callable[ + [spanner_instance_admin.DeleteInstancePartitionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_instance_partition(self) -> Callable[ + [spanner_instance_admin.UpdateInstancePartitionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_instance_partition_operations(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionOperationsRequest], + Union[ + spanner_instance_admin.ListInstancePartitionOperationsResponse, + Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def move_instance(self) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'InstanceAdminTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py new file mode 100644 index 0000000000..0319e519cc --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -0,0 +1,1359 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
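
Every RPC on the ABC is exposed as a property returning a callable and raising ``NotImplementedError`` until a concrete transport supplies one, which keeps the client transport-agnostic. A hedged sketch of the shape a custom transport takes (purely illustrative; the shipped transports build these callables from gRPC stubs or REST sessions instead):

    # Hedged sketch: a hypothetical canned transport for tests.
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports import (
        InstanceAdminTransport,
    )
    from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin

    class CannedTransport(InstanceAdminTransport):
        @property
        def kind(self) -> str:
            return "canned"

        @property
        def get_instance(self):
            def _call(request, retry=None, timeout=None, metadata=()):
                # A real transport would send the request over the wire.
                return spanner_instance_admin.Instance(name=request.name)
            return _call

    # AnonymousCredentials skips the google.auth.default() lookup.
    transport = CannedTransport(credentials=AnonymousCredentials())
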
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of strings for logging.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class InstanceAdminGrpcTransport(InstanceAdminTransport):
+    """gRPC backend transport for InstanceAdmin.
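
The interceptor above is deliberately inert unless two conditions hold: ``google.api_core.client_logging`` is importable and this module's logger is enabled for ``DEBUG``. A hedged sketch of switching it on; the environment variable is the client-logging scope mechanism in recent ``google-api-core`` releases, so treat the exact name as an assumption on older versions:

    # Hedged sketch: surface the structured request/response logs.
    import logging
    import os

    # Scope client logging before the library is imported
    # (google-api-core client_logging convention; assumed env var name).
    os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google"

    logging.basicConfig(level=logging.DEBUG)
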
+
+    Cloud Spanner Instance Admin API
+
+    The Cloud Spanner Instance Admin API can be used to create,
+    delete, modify and list instances. Instances are dedicated Cloud
+    Spanner serving and storage resources to be used by Cloud
+    Spanner databases.
+
+    Each instance has a "configuration", which dictates where the
+    serving resources for the Cloud Spanner instance are located
+    (e.g., US-central, Europe). Configurations are created by Google
+    based on resource availability.
+
+    Cloud Spanner billing is based on the instances that exist and
+    their sizes. After an instance exists, there are no additional
+    per-database or per-operation charges for use of the instance
+    (though there may be additional network bandwidth charges).
+    Instances offer isolation: problems with databases in one
+    instance will not affect other instances. However, within an
+    instance databases can affect each other. For example, if one
+    database in an instance receives a lot of requests and consumes
+    most of the instance resources, fewer resources are available
+    for other databases in that instance, and their performance may
+    suffer.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
+
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'spanner.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
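
Since ``__init__`` accepts either a ready ``grpc.Channel`` or a callable, connection tuning can be done once in ``create_channel`` and the result handed to the transport; note the code path above then ignores ``credentials`` and sets ``_ignore_credentials``. A hedged sketch (the keepalive option is an illustrative choice, not a recommendation from this diff):

    # Hedged sketch: hand the transport a pre-built channel.
    from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
    from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports import (
        InstanceAdminGrpcTransport,
    )

    channel = InstanceAdminGrpcTransport.create_channel(
        "spanner.googleapis.com:443",
        options=[("grpc.keepalive_time_ms", 30000)],  # illustrative tuning
    )
    transport = InstanceAdminGrpcTransport(channel=channel)
    client = InstanceAdminClient(transport=transport)
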
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + spanner_instance_admin.ListInstanceConfigsResponse]: + r"""Return a callable for the list instance configs method over gRPC. + + Lists the supported instance configurations for a + given project. + Returns both Google-managed configurations and + user-managed configurations. + + Returns: + Callable[[~.ListInstanceConfigsRequest], + ~.ListInstanceConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_configs' not in self._stubs: + self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', + request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, + ) + return self._stubs['list_instance_configs'] + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + spanner_instance_admin.InstanceConfig]: + r"""Return a callable for the get instance config method over gRPC. + + Gets information about a particular instance + configuration. + + Returns: + Callable[[~.GetInstanceConfigRequest], + ~.InstanceConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance_config' not in self._stubs: + self._stubs['get_instance_config'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', + request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, + response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, + ) + return self._stubs['get_instance_config'] + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + operations_pb2.Operation]: + r"""Return a callable for the create instance config method over gRPC. + + Creates an instance configuration and begins preparing it to be + used. The returned long-running operation can be used to track + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. + + Immediately after the request returns: + + - The instance configuration is readable via the API, with all + requested attributes. 
The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field is set to true. Its state is ``CREATING``.
+
+        While the operation is pending:
+
+        -  Cancelling the operation renders the instance configuration
+           immediately unreadable via the API.
+        -  Except for deleting the creating resource, all other attempts
+           to modify the instance configuration are rejected.
+
+        Upon completion of the returned operation:
+
+        -  Instances can be created using the instance configuration.
+        -  The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field becomes false. Its state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track creation of the instance configuration. The
+        metadata field type is
+        [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.create``
+        permission on the resource
+        [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
+
+        Returns:
+            Callable[[~.CreateInstanceConfigRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_instance_config' not in self._stubs:
+            self._stubs['create_instance_config'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig',
+                request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_instance_config']
+
+    @property
+    def update_instance_config(self) -> Callable[
+            [spanner_instance_admin.UpdateInstanceConfigRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update instance config method over gRPC.
+
+        Updates an instance configuration. The returned long-running
+        operation can be used to track the progress of updating the
+        instance. If the named instance configuration does not exist,
+        returns ``NOT_FOUND``.
+
+        Only user-managed configurations can be updated.
+
+        Immediately after the request returns:
+
+        -  The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field is set to true.
+
+        While the operation is pending:
+
+        -  Cancelling the operation sets its metadata's
+           [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
+           The operation is guaranteed to succeed at undoing all changes,
+           after which point it terminates with a ``CANCELLED`` status.
+        -  All other attempts to modify the instance configuration are
+           rejected.
+        -  Reading the instance configuration via the API continues to
+           give the pre-request values.
+
+        Upon completion of the returned operation:
+
+        -  Creating instances using the instance configuration uses the
+           new values.
+        -  The new values of the instance configuration are readable via
+           the API.
+        -  The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field becomes false.
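
At the client level the raw ``operations_pb2.Operation`` returned by this stub is wrapped in a ``google.api_core.operation.Operation``, so the creation flow described above can be tracked with ``result()`` and ``metadata``. A hedged sketch (the project, config id, and base configuration are assumptions; a real request also needs replica details):

    # Hedged sketch: tracking the CreateInstanceConfig long-running op.
    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    op = client.create_instance_config(
        parent="projects/my-project",           # hypothetical project
        instance_config_id="custom-us-config",  # hypothetical config id
        instance_config=spanner_admin_instance_v1.InstanceConfig(
            base_config="projects/my-project/instanceConfigs/nam3",
        ),
    )

    # Blocks until the configuration leaves CREATING; while pending,
    # op.metadata carries CreateInstanceConfigMetadata.
    config = op.result()
    print(config.name, config.state)
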
+
+    @property
+    def update_instance_config(self) -> Callable[
+            [spanner_instance_admin.UpdateInstanceConfigRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update instance config method over gRPC.
+
+        Updates an instance configuration. The returned long-running
+        operation can be used to track the progress of updating the
+        instance. If the named instance configuration does not exist,
+        returns ``NOT_FOUND``.
+
+        Only user-managed configurations can be updated.
+
+        Immediately after the request returns:
+
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field is set to true.
+
+        While the operation is pending:
+
+        - Cancelling the operation sets its metadata's
+          [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
+          The operation is guaranteed to succeed at undoing all changes,
+          after which point it terminates with a ``CANCELLED`` status.
+        - All other attempts to modify the instance configuration are
+          rejected.
+        - Reading the instance configuration via the API continues to
+          give the pre-request values.
+
+        Upon completion of the returned operation:
+
+        - Creating instances using the instance configuration uses the
+          new values.
+        - The new values of the instance configuration are readable via
+          the API.
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field becomes false.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track the instance configuration modification.
+        The metadata field type is
+        [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.update``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+        Returns:
+            Callable[[~.UpdateInstanceConfigRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_instance_config' not in self._stubs:
+            self._stubs['update_instance_config'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig',
+                request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_instance_config']
+
+    @property
+    def delete_instance_config(self) -> Callable[
+            [spanner_instance_admin.DeleteInstanceConfigRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete instance config method over gRPC.
+
+        Deletes the instance configuration. Deletion is only allowed
+        when no instances are using the configuration. If any instances
+        are using the configuration, returns ``FAILED_PRECONDITION``.
+
+        Only user-managed configurations can be deleted.
+
+        Authorization requires ``spanner.instanceConfigs.delete``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+        Returns:
+            Callable[[~.DeleteInstanceConfigRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_instance_config' not in self._stubs:
+            self._stubs['delete_instance_config'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig',
+                request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_instance_config']
+
+    @property
+    def list_instance_config_operations(self) -> Callable[
+            [spanner_instance_admin.ListInstanceConfigOperationsRequest],
+            spanner_instance_admin.ListInstanceConfigOperationsResponse]:
+        r"""Return a callable for the list instance config
+        operations method over gRPC.
+
+        Lists the user-managed instance configuration long-running
+        operations in the given project. An instance configuration
+        operation has a name of the form
+        ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
+        The long-running operation metadata field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations. Operations returned are ordered by
+        ``operation.metadata.value.start_time`` in descending order
+        starting from the most recently started operation.
+ + Returns: + Callable[[~.ListInstanceConfigOperationsRequest], + ~.ListInstanceConfigOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_config_operations' not in self._stubs: + self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', + request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, + ) + return self._stubs['list_instance_config_operations'] + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + spanner_instance_admin.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in the given project. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', + request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + spanner_instance_admin.ListInstancePartitionsResponse]: + r"""Return a callable for the list instance partitions method over gRPC. + + Lists all instance partitions for the given instance. + + Returns: + Callable[[~.ListInstancePartitionsRequest], + ~.ListInstancePartitionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_partitions' not in self._stubs: + self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', + request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, + ) + return self._stubs['list_instance_partitions'] + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + spanner_instance_admin.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets information about a particular instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_instance' not in self._stubs:
+            self._stubs['get_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance',
+                request_serializer=spanner_instance_admin.GetInstanceRequest.serialize,
+                response_deserializer=spanner_instance_admin.Instance.deserialize,
+            )
+        return self._stubs['get_instance']
+
+    @property
+    def create_instance(self) -> Callable[
+            [spanner_instance_admin.CreateInstanceRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the create instance method over gRPC.
+
+        Creates an instance and begins preparing it to begin serving.
+        The returned long-running operation can be used to track the
+        progress of preparing the new instance. The instance name is
+        assigned by the caller. If the named instance already exists,
+        ``CreateInstance`` returns ``ALREADY_EXISTS``.
+
+        Immediately upon completion of this request:
+
+        - The instance is readable via the API, with all requested
+          attributes but no allocated resources. Its state is
+          ``CREATING``.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation renders the instance immediately
+          unreadable via the API.
+        - The instance can be deleted.
+        - All other attempts to modify the instance are rejected.
+
+        Upon completion of the returned operation:
+
+        - Billing for all successfully-allocated resources begins (some
+          types may have lower than the requested levels).
+        - Databases can be created in the instance.
+        - The instance's allocated resource levels are readable via the
+          API.
+        - The instance's state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_name>/operations/<operation_id>`` and can be
+        used to track creation of the instance. The metadata field type
+        is
+        [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
+
+        Returns:
+            Callable[[~.CreateInstanceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_instance' not in self._stubs:
+            self._stubs['create_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance',
+                request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_instance']
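+
+    # The callables above and below that return operations_pb2.Operation
+    # pair with the ``operations_client`` property for polling. A hedged
+    # usage sketch (``transport`` is an illustrative variable name, not
+    # part of this module):
+    #
+    #     operation = transport.create_instance(request)
+    #     refreshed = transport.operations_client.get_operation(operation.name)
+    #     if refreshed.done:
+    #         ...  # unpack refreshed.response or refreshed.error
+    #
+    # Most callers never touch this layer; the wrapping client returns a
+    # future-like google.api_core.operation.Operation instead.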
+
+    @property
+    def update_instance(self) -> Callable[
+            [spanner_instance_admin.UpdateInstanceRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update instance method over gRPC.
+
+        Updates an instance, and begins allocating or releasing
+        resources as requested. The returned long-running operation can
+        be used to track the progress of updating the instance. If the
+        named instance does not exist, returns ``NOT_FOUND``.
+
+        Immediately upon completion of this request:
+
+        - For resource types for which a decrease in the instance's
+          allocation has been requested, billing is based on the
+          newly-requested level.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation sets its metadata's
+          [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+          and begins restoring resources to their pre-request values.
+          The operation is guaranteed to succeed at undoing all resource
+          changes, after which point it terminates with a ``CANCELLED``
+          status.
+        - All other attempts to modify the instance are rejected.
+        - Reading the instance via the API continues to give the
+          pre-request resource levels.
+
+        Upon completion of the returned operation:
+
+        - Billing begins for all successfully-allocated resources (some
+          types may have lower than the requested levels).
+        - All newly-reserved resources are available for serving the
+          instance's tables.
+        - The instance's new resource levels are readable via the API.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_name>/operations/<operation_id>`` and can be
+        used to track the instance modification. The metadata field type
+        is
+        [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
+
+        Authorization requires ``spanner.instances.update`` permission
+        on the resource
+        [name][google.spanner.admin.instance.v1.Instance.name].
+
+        Returns:
+            Callable[[~.UpdateInstanceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_instance' not in self._stubs:
+            self._stubs['update_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance',
+                request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_instance']
+
+    @property
+    def delete_instance(self) -> Callable[
+            [spanner_instance_admin.DeleteInstanceRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete instance method over gRPC.
+
+        Deletes an instance.
+
+        Immediately upon completion of the request:
+
+        - Billing ceases for all of the instance's reserved resources.
+
+        Soon afterward:
+
+        - The instance and *all of its databases* immediately and
+          irrevocably disappear from the API. All data in the databases
+          is permanently deleted.
+
+        Returns:
+            Callable[[~.DeleteInstanceRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_instance' not in self._stubs:
+            self._stubs['delete_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance',
+                request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_instance']
+
+    @property
+    def set_iam_policy(self) -> Callable[
+            [iam_policy_pb2.SetIamPolicyRequest],
+            policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the access control policy on an instance resource. Replaces
+        any existing policy.
+ + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + instance resource. + + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def get_instance_partition(self) -> Callable[ + [spanner_instance_admin.GetInstancePartitionRequest], + spanner_instance_admin.InstancePartition]: + r"""Return a callable for the get instance partition method over gRPC. 
+
+        Gets information about a particular instance
+        partition.
+
+        Returns:
+            Callable[[~.GetInstancePartitionRequest],
+                    ~.InstancePartition]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_instance_partition' not in self._stubs:
+            self._stubs['get_instance_partition'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition',
+                request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize,
+                response_deserializer=spanner_instance_admin.InstancePartition.deserialize,
+            )
+        return self._stubs['get_instance_partition']
+
+    @property
+    def create_instance_partition(self) -> Callable[
+            [spanner_instance_admin.CreateInstancePartitionRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the create instance partition method over gRPC.
+
+        Creates an instance partition and begins preparing it to be
+        used. The returned long-running operation can be used to track
+        the progress of preparing the new instance partition. The
+        instance partition name is assigned by the caller. If the named
+        instance partition already exists, ``CreateInstancePartition``
+        returns ``ALREADY_EXISTS``.
+
+        Immediately upon completion of this request:
+
+        - The instance partition is readable via the API, with all
+          requested attributes but no allocated resources. Its state is
+          ``CREATING``.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation renders the instance partition
+          immediately unreadable via the API.
+        - The instance partition can be deleted.
+        - All other attempts to modify the instance partition are
+          rejected.
+
+        Upon completion of the returned operation:
+
+        - Billing for all successfully-allocated resources begins (some
+          types may have lower than the requested levels).
+        - Databases can start using this instance partition.
+        - The instance partition's allocated resource levels are
+          readable via the API.
+        - The instance partition's state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_partition_name>/operations/<operation_id>``
+        and can be used to track creation of the instance partition. The
+        metadata field type is
+        [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata].
+        The response field type is
+        [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+        if successful.
+
+        Returns:
+            Callable[[~.CreateInstancePartitionRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
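+        # A note on the serializer pair registered below: request types
+        # from this package are proto-plus messages, so their generated
+        # ``serialize`` hook is used, while the LRO response is a plain
+        # protobuf message decoded with ``FromString``. Sketch of the two
+        # shapes (``SomeRequest`` is a placeholder, not a real type):
+        #
+        #     request_serializer=SomeRequest.serialize       # proto-plus
+        #     response_deserializer=Operation.FromString     # protobuf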
+        if 'create_instance_partition' not in self._stubs:
+            self._stubs['create_instance_partition'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition',
+                request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_instance_partition']
+
+    @property
+    def delete_instance_partition(self) -> Callable[
+            [spanner_instance_admin.DeleteInstancePartitionRequest],
+            empty_pb2.Empty]:
+        r"""Return a callable for the delete instance partition method over gRPC.
+
+        Deletes an existing instance partition. Requires that the
+        instance partition is not used by any database or backup and is
+        not the default instance partition of an instance.
+
+        Authorization requires ``spanner.instancePartitions.delete``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+        Returns:
+            Callable[[~.DeleteInstancePartitionRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_instance_partition' not in self._stubs:
+            self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition',
+                request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_instance_partition']
+
+    @property
+    def update_instance_partition(self) -> Callable[
+            [spanner_instance_admin.UpdateInstancePartitionRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update instance partition method over gRPC.
+
+        Updates an instance partition, and begins allocating or
+        releasing resources as requested. The returned long-running
+        operation can be used to track the progress of updating the
+        instance partition. If the named instance partition does not
+        exist, returns ``NOT_FOUND``.
+
+        Immediately upon completion of this request:
+
+        - For resource types for which a decrease in the instance
+          partition's allocation has been requested, billing is based on
+          the newly-requested level.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation sets its metadata's
+          [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time],
+          and begins restoring resources to their pre-request values.
+          The operation is guaranteed to succeed at undoing all resource
+          changes, after which point it terminates with a ``CANCELLED``
+          status.
+        - All other attempts to modify the instance partition are
+          rejected.
+        - Reading the instance partition via the API continues to give
+          the pre-request resource levels.
+
+        Upon completion of the returned operation:
+
+        - Billing begins for all successfully-allocated resources (some
+          types may have lower than the requested levels).
+        - All newly-reserved resources are available for serving the
+          instance partition's tables.
+        - The instance partition's new resource levels are readable via
+          the API.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_partition_name>/operations/<operation_id>``
+        and can be used to track the instance partition modification.
+        The metadata field type is
+        [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata].
+        The response field type is
+        [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+        if successful.
+
+        Authorization requires ``spanner.instancePartitions.update``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+        Returns:
+            Callable[[~.UpdateInstancePartitionRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_instance_partition' not in self._stubs:
+            self._stubs['update_instance_partition'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition',
+                request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_instance_partition']
+
+    @property
+    def list_instance_partition_operations(self) -> Callable[
+            [spanner_instance_admin.ListInstancePartitionOperationsRequest],
+            spanner_instance_admin.ListInstancePartitionOperationsResponse]:
+        r"""Return a callable for the list instance partition
+        operations method over gRPC.
+
+        Lists instance partition long-running operations in the given
+        instance. An instance partition operation has a name of the form
+        ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``.
+        The long-running operation metadata field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations. Operations returned are ordered by
+        ``operation.metadata.value.start_time`` in descending order
+        starting from the most recently started operation.
+
+        Authorization requires
+        ``spanner.instancePartitionOperations.list`` permission on the
+        resource
+        [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent].
+
+        Returns:
+            Callable[[~.ListInstancePartitionOperationsRequest],
+                    ~.ListInstancePartitionOperationsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_instance_partition_operations' not in self._stubs:
+            self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations',
+                request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize,
+                response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize,
+            )
+        return self._stubs['list_instance_partition_operations']
+
+    @property
+    def move_instance(self) -> Callable[
+            [spanner_instance_admin.MoveInstanceRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the move instance method over gRPC.
+
+        Moves an instance to the target instance configuration. You can
+        use the returned long-running operation to track the progress of
+        moving the instance.
+
+        ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance
+        meets any of the following criteria:
+
+        - Is undergoing a move to a different instance configuration
+        - Has backups
+        - Has an ongoing update
+        - Contains any CMEK-enabled databases
+        - Is a free trial instance
+
+        While the operation is pending:
+
+        - All other attempts to modify the instance, including changes
+          to its compute capacity, are rejected.
+
+        - The following database and backup admin operations are
+          rejected:
+
+          - ``DatabaseAdmin.CreateDatabase``
+          - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if
+            default_leader is specified in the request.)
+          - ``DatabaseAdmin.RestoreDatabase``
+          - ``DatabaseAdmin.CreateBackup``
+          - ``DatabaseAdmin.CopyBackup``
+
+        - Both the source and target instance configurations are subject
+          to hourly compute and storage charges.
+
+        - The instance might experience higher read-write latencies and
+          a higher transaction abort rate. However, moving an instance
+          doesn't cause any downtime.
+
+        The returned long-running operation has a name of the format
+        ``<instance_name>/operations/<operation_id>`` and can be used to
+        track the move instance operation. The metadata field type is
+        [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful. Cancelling the operation sets its metadata's
+        [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
+        Cancellation is not immediate because it involves moving any
+        data previously moved to the target instance configuration back
+        to the original instance configuration. You can use this
+        operation to track the progress of the cancellation. Upon
+        successful completion of the cancellation, the operation
+        terminates with ``CANCELLED`` status.
+
+        If not cancelled, upon completion of the returned operation:
+
+        - The instance successfully moves to the target instance
+          configuration.
+        - You are billed for compute and storage in target instance
+          configuration.
+
+        Authorization requires the ``spanner.instances.update``
+        permission on the resource
+        [instance][google.spanner.admin.instance.v1.Instance].
+
+        For more details, see `Move an
+        instance <https://cloud.google.com/spanner/docs/move-instance>`__.
+
+        Returns:
+            Callable[[~.MoveInstanceRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'move_instance' not in self._stubs:
+            self._stubs['move_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance',
+                request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['move_instance']
+
+    def close(self):
+        self._logged_channel.close()
+
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
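+        # ``response_deserializer`` is ``None`` in the registration below,
+        # which reads as deliberate: the callable is annotated above as
+        # returning ``None``, and DeleteOperation's response carries no
+        # payload the caller needs, so no decoding hook is supplied. The
+        # CancelOperation stub that follows is registered the same way.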
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'InstanceAdminGrpcTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py new file mode 100644 index 0000000000..01364ea833 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -0,0 +1,1560 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
+from .grpc import InstanceAdminGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
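+
+
+# The interceptor above only does work when DEBUG logging is enabled for
+# this module. A minimal sketch of switching it on from application code,
+# using only the standard library (the logger name follows this package's
+# module path):
+#
+#     import logging
+#     logging.basicConfig()
+#     logging.getLogger("google.cloud.spanner_admin_instance_v1").setLevel(
+#         logging.DEBUG
+#     )
+#
+# Whether records are actually emitted also depends on
+# CLIENT_LOGGING_SUPPORTED, i.e. on the installed google-api-core
+# providing ``client_logging``.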
f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class InstanceAdminGrpcAsyncIOTransport(InstanceAdminTransport): + """gRPC AsyncIO backend transport for InstanceAdmin. + + Cloud Spanner Instance Admin API + + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]]: + r"""Return a callable for the list instance configs method over gRPC. + + Lists the supported instance configurations for a + given project. + Returns both Google-managed configurations and + user-managed configurations. + + Returns: + Callable[[~.ListInstanceConfigsRequest], + Awaitable[~.ListInstanceConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_configs' not in self._stubs: + self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', + request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, + ) + return self._stubs['list_instance_configs'] + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + Awaitable[spanner_instance_admin.InstanceConfig]]: + r"""Return a callable for the get instance config method over gRPC. + + Gets information about a particular instance + configuration. + + Returns: + Callable[[~.GetInstanceConfigRequest], + Awaitable[~.InstanceConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance_config' not in self._stubs: + self._stubs['get_instance_config'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', + request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, + response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, + ) + return self._stubs['get_instance_config'] + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance config method over gRPC. + + Creates an instance configuration and begins preparing it to be + used. The returned long-running operation can be used to track + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. + + Immediately after the request returns: + + - The instance configuration is readable via the API, with all + requested attributes. The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field is set to true. Its state is ``CREATING``. 
+
+        While the operation is pending:
+
+        - Cancelling the operation renders the instance configuration
+          immediately unreadable via the API.
+        - Except for deleting the creating resource, all other attempts
+          to modify the instance configuration are rejected.
+
+        Upon completion of the returned operation:
+
+        - Instances can be created using the instance configuration.
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field becomes false. Its state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track creation of the instance configuration. The
+        metadata field type is
+        [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.create``
+        permission on the resource
+        [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
+
+        Returns:
+            Callable[[~.CreateInstanceConfigRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_instance_config' not in self._stubs:
+            self._stubs['create_instance_config'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig',
+                request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_instance_config']
+
+    @property
+    def update_instance_config(self) -> Callable[
+            [spanner_instance_admin.UpdateInstanceConfigRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the update instance config method over gRPC.
+
+        Updates an instance configuration. The returned long-running
+        operation can be used to track the progress of updating the
+        instance. If the named instance configuration does not exist,
+        returns ``NOT_FOUND``.
+
+        Only user-managed configurations can be updated.
+
+        Immediately after the request returns:
+
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field is set to true.
+
+        While the operation is pending:
+
+        - Cancelling the operation sets its metadata's
+          [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
+          The operation is guaranteed to succeed at undoing all changes,
+          after which point it terminates with a ``CANCELLED`` status.
+        - All other attempts to modify the instance configuration are
+          rejected.
+        - Reading the instance configuration via the API continues to
+          give the pre-request values.
+
+        Upon completion of the returned operation:
+
+        - Creating instances using the instance configuration uses the
+          new values.
+        - The new values of the instance configuration are readable via
+          the API.
+        - The instance configuration's
+          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+          field becomes false.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track the instance configuration modification.
+        The metadata field type is
+        [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.update``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+        Returns:
+            Callable[[~.UpdateInstanceConfigRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_instance_config' not in self._stubs:
+            self._stubs['update_instance_config'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig',
+                request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_instance_config']
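+
+    # The stub registrations in this async transport are textually the
+    # same as in InstanceAdminGrpcTransport; what changes is the channel
+    # type. On an aio.Channel, ``unary_unary(...)`` yields a multicallable
+    # whose invocations are awaitable, which is why the properties here
+    # annotate their results as Awaitable[...] instead of plain values.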
+
+    @property
+    def delete_instance_config(self) -> Callable[
+            [spanner_instance_admin.DeleteInstanceConfigRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete instance config method over gRPC.
+
+        Deletes the instance configuration. Deletion is only allowed
+        when no instances are using the configuration. If any instances
+        are using the configuration, returns ``FAILED_PRECONDITION``.
+
+        Only user-managed configurations can be deleted.
+
+        Authorization requires ``spanner.instanceConfigs.delete``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+        Returns:
+            Callable[[~.DeleteInstanceConfigRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_instance_config' not in self._stubs:
+            self._stubs['delete_instance_config'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig',
+                request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_instance_config']
+
+    @property
+    def list_instance_config_operations(self) -> Callable[
+            [spanner_instance_admin.ListInstanceConfigOperationsRequest],
+            Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]]:
+        r"""Return a callable for the list instance config
+        operations method over gRPC.
+
+        Lists the user-managed instance configuration long-running
+        operations in the given project. An instance configuration
+        operation has a name of the form
+        ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
+        The long-running operation metadata field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations. Operations returned are ordered by
+        ``operation.metadata.value.start_time`` in descending order
+        starting from the most recently started operation.
+ + Returns: + Callable[[~.ListInstanceConfigOperationsRequest], + Awaitable[~.ListInstanceConfigOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_config_operations' not in self._stubs: + self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', + request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, + ) + return self._stubs['list_instance_config_operations'] + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + Awaitable[spanner_instance_admin.ListInstancesResponse]]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in the given project. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', + request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]]: + r"""Return a callable for the list instance partitions method over gRPC. + + Lists all instance partitions for the given instance. + + Returns: + Callable[[~.ListInstancePartitionsRequest], + Awaitable[~.ListInstancePartitionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_partitions' not in self._stubs: + self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', + request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, + ) + return self._stubs['list_instance_partitions'] + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + Awaitable[spanner_instance_admin.Instance]]: + r"""Return a callable for the get instance method over gRPC. + + Gets information about a particular instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
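+        # (The stub is cached in self._stubs on first access, so repeated
+        # reads of this property reuse the same channel callable.)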
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_instance' not in self._stubs:
+            self._stubs['get_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance',
+                request_serializer=spanner_instance_admin.GetInstanceRequest.serialize,
+                response_deserializer=spanner_instance_admin.Instance.deserialize,
+            )
+        return self._stubs['get_instance']
+
+    @property
+    def create_instance(self) -> Callable[
+            [spanner_instance_admin.CreateInstanceRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the create instance method over gRPC.
+
+        Creates an instance and begins preparing it to begin serving.
+        The returned long-running operation can be used to track the
+        progress of preparing the new instance. The instance name is
+        assigned by the caller. If the named instance already exists,
+        ``CreateInstance`` returns ``ALREADY_EXISTS``.
+
+        Immediately upon completion of this request:
+
+        - The instance is readable via the API, with all requested
+          attributes but no allocated resources. Its state is
+          ``CREATING``.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation renders the instance immediately
+          unreadable via the API.
+        - The instance can be deleted.
+        - All other attempts to modify the instance are rejected.
+
+        Upon completion of the returned operation:
+
+        - Billing for all successfully-allocated resources begins (some
+          types may have lower than the requested levels).
+        - Databases can be created in the instance.
+        - The instance's allocated resource levels are readable via the
+          API.
+        - The instance's state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_name>/operations/<operation_id>`` and can be
+        used to track creation of the instance. The metadata field type
+        is
+        [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
+
+        Returns:
+            Callable[[~.CreateInstanceRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_instance' not in self._stubs:
+            self._stubs['create_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance',
+                request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_instance']
+
+    @property
+    def update_instance(self) -> Callable[
+            [spanner_instance_admin.UpdateInstanceRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the update instance method over gRPC.
+
+        Updates an instance, and begins allocating or releasing
+        resources as requested. The returned long-running operation can
+        be used to track the progress of updating the instance. If the
+        named instance does not exist, returns ``NOT_FOUND``.
+
+        Immediately upon completion of this request:
+
+        - For resource types for which a decrease in the instance's
+          allocation has been requested, billing is based on the
+          newly-requested level.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation sets its metadata's
+          [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+          and begins restoring resources to their pre-request values.
+          The operation is guaranteed to succeed at undoing all resource
+          changes, after which point it terminates with a ``CANCELLED``
+          status.
+        - All other attempts to modify the instance are rejected.
+        - Reading the instance via the API continues to give the
+          pre-request resource levels.
+
+        Upon completion of the returned operation:
+
+        - Billing begins for all successfully-allocated resources (some
+          types may have lower than the requested levels).
+        - All newly-reserved resources are available for serving the
+          instance's tables.
+        - The instance's new resource levels are readable via the API.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_name>/operations/<operation_id>`` and can be
+        used to track the instance modification. The metadata field type
+        is
+        [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful.
+
+        Authorization requires ``spanner.instances.update`` permission
+        on the resource
+        [name][google.spanner.admin.instance.v1.Instance.name].
+
+        Returns:
+            Callable[[~.UpdateInstanceRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_instance' not in self._stubs:
+            self._stubs['update_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance',
+                request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_instance']
+
+    @property
+    def delete_instance(self) -> Callable[
+            [spanner_instance_admin.DeleteInstanceRequest],
+            Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete instance method over gRPC.
+
+        Deletes an instance.
+
+        Immediately upon completion of the request:
+
+        - Billing ceases for all of the instance's reserved resources.
+
+        Soon afterward:
+
+        - The instance and *all of its databases* immediately and
+          irrevocably disappear from the API. All data in the databases
+          is permanently deleted.
+
+        Returns:
+            Callable[[~.DeleteInstanceRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'delete_instance' not in self._stubs:
+            self._stubs['delete_instance'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance',
+                request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs['delete_instance']
+
+    @property
+    def set_iam_policy(self) -> Callable[
+            [iam_policy_pb2.SetIamPolicyRequest],
+            Awaitable[policy_pb2.Policy]]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the access control policy on an instance resource.
Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + instance resource. + + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
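+        # Illustrative usage sketch (hypothetical `transport` and
+        # `instance_name`, not part of the generated surface):
+        #
+        #     response = await transport.test_iam_permissions(
+        #         iam_policy_pb2.TestIamPermissionsRequest(
+        #             resource=instance_name,
+        #             permissions=["spanner.instances.get"],
+        #         ))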
+        if 'test_iam_permissions' not in self._stubs:
+            self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions',
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs['test_iam_permissions']
+
+    @property
+    def get_instance_partition(self) -> Callable[
+            [spanner_instance_admin.GetInstancePartitionRequest],
+            Awaitable[spanner_instance_admin.InstancePartition]]:
+        r"""Return a callable for the get instance partition method over gRPC.
+
+        Gets information about a particular instance
+        partition.
+
+        Returns:
+            Callable[[~.GetInstancePartitionRequest],
+                    Awaitable[~.InstancePartition]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_instance_partition' not in self._stubs:
+            self._stubs['get_instance_partition'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition',
+                request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize,
+                response_deserializer=spanner_instance_admin.InstancePartition.deserialize,
+            )
+        return self._stubs['get_instance_partition']
+
+    @property
+    def create_instance_partition(self) -> Callable[
+            [spanner_instance_admin.CreateInstancePartitionRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the create instance partition method over gRPC.
+
+        Creates an instance partition and begins preparing it to be
+        used. The returned long-running operation can be used to track
+        the progress of preparing the new instance partition. The
+        instance partition name is assigned by the caller. If the named
+        instance partition already exists, ``CreateInstancePartition``
+        returns ``ALREADY_EXISTS``.
+
+        Immediately upon completion of this request:
+
+        - The instance partition is readable via the API, with all
+          requested attributes but no allocated resources. Its state is
+          ``CREATING``.
+
+        Until completion of the returned operation:
+
+        - Cancelling the operation renders the instance partition
+          immediately unreadable via the API.
+        - The instance partition can be deleted.
+        - All other attempts to modify the instance partition are
+          rejected.
+
+        Upon completion of the returned operation:
+
+        - Billing for all successfully-allocated resources begins (some
+          types may have lower than the requested levels).
+        - Databases can start using this instance partition.
+        - The instance partition's allocated resource levels are
+          readable via the API.
+        - The instance partition's state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_partition_name>/operations/<operation_id>``
+        and can be used to track creation of the instance partition. The
+        metadata field type is
+        [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata].
+        The response field type is
+        [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+        if successful.
+
+        Returns:
+            Callable[[~.CreateInstancePartitionRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
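+        # (`.serialize` below is the proto-plus request codec, while the
+        # longrunning `Operation.FromString` decodes the raw protobuf
+        # response.)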
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_instance_partition' not in self._stubs: + self._stubs['create_instance_partition'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition', + request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_instance_partition'] + + @property + def delete_instance_partition(self) -> Callable[ + [spanner_instance_admin.DeleteInstancePartitionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete instance partition method over gRPC. + + Deletes an existing instance partition. Requires that the + instance partition is not used by any database or backup and is + not the default instance partition of an instance. + + Authorization requires ``spanner.instancePartitions.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + Returns: + Callable[[~.DeleteInstancePartitionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance_partition' not in self._stubs: + self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition', + request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_instance_partition'] + + @property + def update_instance_partition(self) -> Callable[ + [spanner_instance_admin.UpdateInstancePartitionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance partition method over gRPC. + + Updates an instance partition, and begins allocating or + releasing resources as requested. The returned long-running + operation can be used to track the progress of updating the + instance partition. If the named instance partition does not + exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance + partition's allocation has been requested, billing is based on + the newly-requested level. + + Until completion of the returned operation: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], + and begins restoring resources to their pre-request values. + The operation is guaranteed to succeed at undoing all resource + changes, after which point it terminates with a ``CANCELLED`` + status. + - All other attempts to modify the instance partition are + rejected. + - Reading the instance partition via the API continues to give + the pre-request resource levels. + + Upon completion of the returned operation: + + - Billing begins for all successfully-allocated resources (some + types may have lower than the requested levels). + - All newly-reserved resources are available for serving the + instance partition's tables. + - The instance partition's new resource levels are readable via + the API. 
+
+        The returned long-running operation will have a name of the
+        format ``<instance_partition_name>/operations/<operation_id>``
+        and can be used to track the instance partition modification.
+        The metadata field type is
+        [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata].
+        The response field type is
+        [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+        if successful.
+
+        Authorization requires ``spanner.instancePartitions.update``
+        permission on the resource
+        [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+        Returns:
+            Callable[[~.UpdateInstancePartitionRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_instance_partition' not in self._stubs:
+            self._stubs['update_instance_partition'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition',
+                request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['update_instance_partition']
+
+    @property
+    def list_instance_partition_operations(self) -> Callable[
+            [spanner_instance_admin.ListInstancePartitionOperationsRequest],
+            Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]]:
+        r"""Return a callable for the list instance partition
+        operations method over gRPC.
+
+        Lists instance partition long-running operations in the given
+        instance. An instance partition operation has a name of the form
+        ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``.
+        The long-running operation metadata field type
+        ``metadata.type_url`` describes the type of the metadata.
+        Operations returned include those that have
+        completed/failed/canceled within the last 7 days, and pending
+        operations. Operations returned are ordered by
+        ``operation.metadata.value.start_time`` in descending order
+        starting from the most recently started operation.
+
+        Authorization requires
+        ``spanner.instancePartitionOperations.list`` permission on the
+        resource
+        [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent].
+
+        Returns:
+            Callable[[~.ListInstancePartitionOperationsRequest],
+                    Awaitable[~.ListInstancePartitionOperationsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'list_instance_partition_operations' not in self._stubs:
+            self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary(
+                '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations',
+                request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize,
+                response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize,
+            )
+        return self._stubs['list_instance_partition_operations']
+
+    @property
+    def move_instance(self) -> Callable[
+            [spanner_instance_admin.MoveInstanceRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the move instance method over gRPC.
+
+        Moves an instance to the target instance configuration. You can
+        use the returned long-running operation to track the progress of
+        moving the instance.
+
+        ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance
+        meets any of the following criteria:
+
+        - Is undergoing a move to a different instance configuration
+        - Has backups
+        - Has an ongoing update
+        - Contains any CMEK-enabled databases
+        - Is a free trial instance
+
+        While the operation is pending:
+
+        - All other attempts to modify the instance, including changes
+          to its compute capacity, are rejected.
+
+        - The following database and backup admin operations are
+          rejected:
+
+          - ``DatabaseAdmin.CreateDatabase``
+          - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if
+            default_leader is specified in the request.)
+          - ``DatabaseAdmin.RestoreDatabase``
+          - ``DatabaseAdmin.CreateBackup``
+          - ``DatabaseAdmin.CopyBackup``
+
+        - Both the source and target instance configurations are subject
+          to hourly compute and storage charges.
+
+        - The instance might experience higher read-write latencies and
+          a higher transaction abort rate. However, moving an instance
+          doesn't cause any downtime.
+
+        The returned long-running operation has a name of the format
+        ``<instance_name>/operations/<operation_id>`` and can be used to
+        track the move instance operation. The metadata field type is
+        [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful. Cancelling the operation sets its metadata's
+        [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
+        Cancellation is not immediate because it involves moving any
+        data previously moved to the target instance configuration back
+        to the original instance configuration. You can use this
+        operation to track the progress of the cancellation. Upon
+        successful completion of the cancellation, the operation
+        terminates with ``CANCELLED`` status.
+
+        If not cancelled, upon completion of the returned operation:
+
+        - The instance successfully moves to the target instance
+          configuration.
+        - You are billed for compute and storage in the target instance
+          configuration.
+
+        Authorization requires the ``spanner.instances.update``
+        permission on the resource
+        [instance][google.spanner.admin.instance.v1.Instance].
+
+        For more details, see `Move an
+        instance <https://cloud.google.com/spanner/docs/move-instance>`__.
+
+        Returns:
+            Callable[[~.MoveInstanceRequest],
+                    Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
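+        # Illustrative usage sketch (hypothetical names, not part of the
+        # generated surface); the stub returns a longrunning Operation
+        # that can be polled through the Operations stubs defined below:
+        #
+        #     operation = await transport.move_instance(
+        #         spanner_instance_admin.MoveInstanceRequest(
+        #             name="projects/p/instances/i",
+        #             target_config="projects/p/instanceConfigs/nam3",
+        #         ))
+        #     done = await transport.get_operation(
+        #         operations_pb2.GetOperationRequest(name=operation.name))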
+ if 'move_instance' not in self._stubs: + self._stubs['move_instance'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance', + request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['move_instance'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instance_configs: self._wrap_method( + self.list_instance_configs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_instance_config: self._wrap_method( + self.get_instance_config, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_instance_config: self._wrap_method( + self.create_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_config: self._wrap_method( + self.update_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_config: self._wrap_method( + self.delete_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_config_operations: self._wrap_method( + self.list_instance_config_operations, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: self._wrap_method( + self.list_instances, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_instance_partitions: self._wrap_method( + self.list_instance_partitions, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: self._wrap_method( + self.get_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_instance: self._wrap_method( + self.create_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.update_instance: self._wrap_method( + self.update_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_instance: self._wrap_method( + self.delete_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.get_instance_partition: self._wrap_method( + self.get_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.create_instance_partition: self._wrap_method( + self.create_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_partition: self._wrap_method( + self.delete_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_partition: self._wrap_method( + self.update_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_partition_operations: self._wrap_method( + self.list_instance_partition_operations, + default_timeout=None, + client_info=client_info, + ), + self.move_instance: self._wrap_method( + self.move_instance, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
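+        # Illustrative usage sketch (hypothetical `operation`, e.g. one
+        # returned by a long-running stub above):
+        #
+        #     await transport.cancel_operation(
+        #         operations_pb2.CancelOperationRequest(name=operation.name))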
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'InstanceAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py new file mode 100644 index 0000000000..1d9124d35d --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py @@ -0,0 +1,4462 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseInstanceAdminRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InstanceAdminRestInterceptor: + """Interceptor for InstanceAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstanceAdminRestTransport. + + .. 
code-block:: python + class MyCustomInstanceAdminInterceptor(InstanceAdminRestInterceptor): + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instance_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_instance_partition(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance_partition(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_instance_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_instance_partition(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance_partition(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance_partition(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instance_config_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instance_config_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instance_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instance_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instance_partition_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instance_partition_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instance_partitions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instance_partitions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_move_instance(self, request, metadata): 
+ logging.log(f"Received request: {request}") + return request, metadata + + def post_move_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance_partition(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance_partition(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InstanceAdminRestTransport(interceptor=MyCustomInstanceAdminInterceptor()) + client = InstanceAdminClient(transport=transport) + + + """ + def pre_create_instance(self, request: spanner_instance_admin.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. + """ + return response + + def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_create_instance_config(self, request: spanner_instance_admin.CreateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_create_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance_config + + DEPRECATED. Please use the `post_create_instance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_create_instance_config` interceptor runs + before the `post_create_instance_config_with_metadata` interceptor. + """ + return response + + def post_create_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_create_instance_config_with_metadata` + interceptor in new development instead of the `post_create_instance_config` interceptor. + When both interceptors are used, this `post_create_instance_config_with_metadata` interceptor runs after the + `post_create_instance_config` interceptor. The (possibly modified) response returned by + `post_create_instance_config` will be passed to + `post_create_instance_config_with_metadata`. + """ + return response, metadata + + def pre_create_instance_partition(self, request: spanner_instance_admin.CreateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_create_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance_partition + + DEPRECATED. Please use the `post_create_instance_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_create_instance_partition` interceptor runs + before the `post_create_instance_partition_with_metadata` interceptor. + """ + return response + + def post_create_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_create_instance_partition_with_metadata` + interceptor in new development instead of the `post_create_instance_partition` interceptor. + When both interceptors are used, this `post_create_instance_partition_with_metadata` interceptor runs after the + `post_create_instance_partition` interceptor. The (possibly modified) response returned by + `post_create_instance_partition` will be passed to + `post_create_instance_partition_with_metadata`. + """ + return response, metadata + + def pre_delete_instance(self, request: spanner_instance_admin.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def pre_delete_instance_config(self, request: spanner_instance_admin.DeleteInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def pre_delete_instance_partition(self, request: spanner_instance_admin.DeleteInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. 
The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_get_instance(self, request: spanner_instance_admin.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_instance(self, response: spanner_instance_admin.Instance) -> spanner_instance_admin.Instance: + """Post-rpc interceptor for get_instance + + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. + """ + return response + + def post_get_instance_with_metadata(self, response: spanner_instance_admin.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + + def pre_get_instance_config(self, request: spanner_instance_admin.GetInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_instance_config(self, response: spanner_instance_admin.InstanceConfig) -> spanner_instance_admin.InstanceConfig: + """Post-rpc interceptor for get_instance_config + + DEPRECATED. Please use the `post_get_instance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_instance_config` interceptor runs + before the `post_get_instance_config_with_metadata` interceptor. + """ + return response + + def post_get_instance_config_with_metadata(self, response: spanner_instance_admin.InstanceConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstanceConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_get_instance_config_with_metadata` + interceptor in new development instead of the `post_get_instance_config` interceptor. + When both interceptors are used, this `post_get_instance_config_with_metadata` interceptor runs after the + `post_get_instance_config` interceptor. The (possibly modified) response returned by + `post_get_instance_config` will be passed to + `post_get_instance_config_with_metadata`. + """ + return response, metadata + + def pre_get_instance_partition(self, request: spanner_instance_admin.GetInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_instance_partition(self, response: spanner_instance_admin.InstancePartition) -> spanner_instance_admin.InstancePartition: + """Post-rpc interceptor for get_instance_partition + + DEPRECATED. Please use the `post_get_instance_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_instance_partition` interceptor runs + before the `post_get_instance_partition_with_metadata` interceptor. + """ + return response + + def post_get_instance_partition_with_metadata(self, response: spanner_instance_admin.InstancePartition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstancePartition, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_get_instance_partition_with_metadata` + interceptor in new development instead of the `post_get_instance_partition` interceptor. + When both interceptors are used, this `post_get_instance_partition_with_metadata` interceptor runs after the + `post_get_instance_partition` interceptor. The (possibly modified) response returned by + `post_get_instance_partition` will be passed to + `post_get_instance_partition_with_metadata`. + """ + return response, metadata + + def pre_list_instance_config_operations(self, request: spanner_instance_admin.ListInstanceConfigOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_config_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_config_operations(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: + """Post-rpc interceptor for list_instance_config_operations + + DEPRECATED. Please use the `post_list_instance_config_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. 
This `post_list_instance_config_operations` interceptor runs + before the `post_list_instance_config_operations_with_metadata` interceptor. + """ + return response + + def post_list_instance_config_operations_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_config_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_config_operations_with_metadata` + interceptor in new development instead of the `post_list_instance_config_operations` interceptor. + When both interceptors are used, this `post_list_instance_config_operations_with_metadata` interceptor runs after the + `post_list_instance_config_operations` interceptor. The (possibly modified) response returned by + `post_list_instance_config_operations` will be passed to + `post_list_instance_config_operations_with_metadata`. + """ + return response, metadata + + def pre_list_instance_configs(self, request: spanner_instance_admin.ListInstanceConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_configs(self, response: spanner_instance_admin.ListInstanceConfigsResponse) -> spanner_instance_admin.ListInstanceConfigsResponse: + """Post-rpc interceptor for list_instance_configs + + DEPRECATED. Please use the `post_list_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instance_configs` interceptor runs + before the `post_list_instance_configs_with_metadata` interceptor. + """ + return response + + def post_list_instance_configs_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_configs_with_metadata` + interceptor in new development instead of the `post_list_instance_configs` interceptor. + When both interceptors are used, this `post_list_instance_configs_with_metadata` interceptor runs after the + `post_list_instance_configs` interceptor. The (possibly modified) response returned by + `post_list_instance_configs` will be passed to + `post_list_instance_configs_with_metadata`. 
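+
+        For example, when a subclass overrides both hooks, they run in the
+        order described above (an illustrative sketch)::
+
+            class MyCustomInstanceAdminInterceptor(InstanceAdminRestInterceptor):
+                def post_list_instance_configs(self, response):
+                    # Deprecated hook: runs first.
+                    return response
+
+                def post_list_instance_configs_with_metadata(self, response, metadata):
+                    # Runs second, receiving the response returned above.
+                    return response, metadata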
+ """ + return response, metadata + + def pre_list_instance_partition_operations(self, request: spanner_instance_admin.ListInstancePartitionOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_partition_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_partition_operations(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: + """Post-rpc interceptor for list_instance_partition_operations + + DEPRECATED. Please use the `post_list_instance_partition_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instance_partition_operations` interceptor runs + before the `post_list_instance_partition_operations_with_metadata` interceptor. + """ + return response + + def post_list_instance_partition_operations_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_partition_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_partition_operations_with_metadata` + interceptor in new development instead of the `post_list_instance_partition_operations` interceptor. + When both interceptors are used, this `post_list_instance_partition_operations_with_metadata` interceptor runs after the + `post_list_instance_partition_operations` interceptor. The (possibly modified) response returned by + `post_list_instance_partition_operations` will be passed to + `post_list_instance_partition_operations_with_metadata`. + """ + return response, metadata + + def pre_list_instance_partitions(self, request: spanner_instance_admin.ListInstancePartitionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_partitions + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_partitions(self, response: spanner_instance_admin.ListInstancePartitionsResponse) -> spanner_instance_admin.ListInstancePartitionsResponse: + """Post-rpc interceptor for list_instance_partitions + + DEPRECATED. Please use the `post_list_instance_partitions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instance_partitions` interceptor runs + before the `post_list_instance_partitions_with_metadata` interceptor. 
+ """ + return response + + def post_list_instance_partitions_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_partitions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_partitions_with_metadata` + interceptor in new development instead of the `post_list_instance_partitions` interceptor. + When both interceptors are used, this `post_list_instance_partitions_with_metadata` interceptor runs after the + `post_list_instance_partitions` interceptor. The (possibly modified) response returned by + `post_list_instance_partitions` will be passed to + `post_list_instance_partitions_with_metadata`. + """ + return response, metadata + + def pre_list_instances(self, request: spanner_instance_admin.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instances(self, response: spanner_instance_admin.ListInstancesResponse) -> spanner_instance_admin.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. + """ + return response + + def post_list_instances_with_metadata(self, response: spanner_instance_admin.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + + def pre_move_instance(self, request: spanner_instance_admin.MoveInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.MoveInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for move_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_move_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for move_instance + + DEPRECATED. 
Please use the `post_move_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_move_instance` interceptor runs + before the `post_move_instance_with_metadata` interceptor. + """ + return response + + def post_move_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_move_instance_with_metadata` + interceptor in new development instead of the `post_move_instance` interceptor. + When both interceptors are used, this `post_move_instance_with_metadata` interceptor runs after the + `post_move_instance` interceptor. The (possibly modified) response returned by + `post_move_instance` will be passed to + `post_move_instance_with_metadata`. + """ + return response, metadata + + def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. 
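+
+        For example, a subclass might attach an extra metadata entry to the
+        outgoing request (a minimal sketch; the header name is illustrative)::
+
+            class MyCustomInstanceAdminInterceptor(InstanceAdminRestInterceptor):
+                def pre_test_iam_permissions(self, request, metadata):
+                    # Append a custom key/value pair to the request metadata.
+                    metadata = list(metadata) + [("x-debug-trace", "on")]
+                    return request, metadata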
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + def pre_update_instance(self, request: spanner_instance_admin.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. + """ + return response + + def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_update_instance_config(self, request: spanner_instance_admin.UpdateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_update_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance_config + + DEPRECATED. Please use the `post_update_instance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_update_instance_config` interceptor runs + before the `post_update_instance_config_with_metadata` interceptor. + """ + return response + + def post_update_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_update_instance_config_with_metadata` + interceptor in new development instead of the `post_update_instance_config` interceptor. + When both interceptors are used, this `post_update_instance_config_with_metadata` interceptor runs after the + `post_update_instance_config` interceptor. The (possibly modified) response returned by + `post_update_instance_config` will be passed to + `post_update_instance_config_with_metadata`. + """ + return response, metadata + + def pre_update_instance_partition(self, request: spanner_instance_admin.UpdateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_update_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance_partition + + DEPRECATED. Please use the `post_update_instance_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_update_instance_partition` interceptor runs + before the `post_update_instance_partition_with_metadata` interceptor. + """ + return response + + def post_update_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_update_instance_partition_with_metadata` + interceptor in new development instead of the `post_update_instance_partition` interceptor. + When both interceptors are used, this `post_update_instance_partition_with_metadata` interceptor runs after the + `post_update_instance_partition` interceptor. The (possibly modified) response returned by + `post_update_instance_partition` will be passed to + `post_update_instance_partition_with_metadata`. + """ + return response, metadata + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. 
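+
+        As with every hook on this class, an override only takes effect once
+        the interceptor is installed on the transport (a minimal sketch; the
+        subclass name is illustrative)::
+
+            transport = InstanceAdminRestTransport(
+                interceptor=MyCustomInstanceAdminInterceptor(),
+            )
+            client = InstanceAdminClient(transport=transport)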
+ """ + return response + + +@dataclasses.dataclass +class InstanceAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstanceAdminRestInterceptor + + +class InstanceAdminRestTransport(_BaseInstanceAdminRestTransport): + """REST backend synchronous transport for InstanceAdmin. + + Cloud Spanner Instance Admin API + + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InstanceAdminRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstanceAdminRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ], + 
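+                # The ListOperations mappings below mirror the GetOperation URI
+                # patterns above, minus the trailing operation ID segment.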
'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateInstance(_BaseInstanceAdminRestTransport._BaseCreateInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CreateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.spanner_instance_admin.CreateInstanceRequest): + The request object. The request for + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
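+
+                    If needed, the raw operation can be polled through the
+                    transport's ``operations_client`` (a minimal sketch;
+                    ``op`` stands for the message returned by this call)::
+
+                        import time
+
+                        while not op.done:
+                            time.sleep(1)
+                            op = transport.operations_client.get_operation(op.name)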
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_http_options() + + request, metadata = self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInstanceConfig(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CreateInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.CreateInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.CreateInstanceConfigRequest): + The request object. The request for + [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_create_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CreateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_config", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstanceConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInstancePartition(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CreateInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.CreateInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.CreateInstancePartitionRequest): + The request object. The request for + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_create_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CreateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_partition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstancePartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInstance(_BaseInstanceAdminRestTransport._BaseDeleteInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.DeleteInstanceRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete instance method over HTTP. + + Args: + request (~.spanner_instance_admin.DeleteInstanceRequest): + The request object. The request for + [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_http_options() + + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInstanceConfig(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.DeleteInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.DeleteInstanceConfigRequest): + The request object. The request for + [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_delete_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInstancePartition(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.DeleteInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.DeleteInstancePartitionRequest): + The request object. The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_delete_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetIamPolicy(_BaseInstanceAdminRestTransport._BaseGetIamPolicy, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. 
A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation <https://cloud.google.com/iam/docs/>`__. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetIamPolicy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_iam_policy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstance(_BaseInstanceAdminRestTransport._BaseGetInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.spanner_instance_admin.GetInstanceRequest): + The request object. The request for + [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.Instance: + An isolated set of Cloud Spanner + resources on which databases can be + hosted. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetInstance._get_http_options() + + request, metadata = self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.Instance() + pb_resp = spanner_instance_admin.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.Instance.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstanceConfig(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.GetInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.InstanceConfig: + 
r"""Call the get instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.GetInstanceConfigRequest): + The request object. The request for + [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.InstanceConfig: + A possible configuration for a Cloud + Spanner instance. Configurations define + the geographic placement of nodes and + their replication. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_get_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.InstanceConfig() + pb_resp = spanner_instance_admin.InstanceConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.InstanceConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_config", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstanceConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstancePartition(_BaseInstanceAdminRestTransport._BaseGetInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.GetInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.InstancePartition: + r"""Call the get instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.GetInstancePartitionRequest): + The request object. The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.InstancePartition: + An isolated set of Cloud Spanner + resources that databases can define + placements on. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_get_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.InstancePartition() + pb_resp = spanner_instance_admin.InstancePartition.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.InstancePartition.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_partition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstancePartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstanceConfigOperations(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstanceConfigOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstanceConfigOperationsRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: + r"""Call the list instance config + operations method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstanceConfigOperationsRequest): + The request object. The request for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstanceConfigOperationsResponse: + The response for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_config_operations(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstanceConfigOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
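+ # Note: this transport method returns a single page. The parsed response
+ # below carries a next_page_token; iterating over all pages happens one
+ # layer up, where the client wraps this call in a pager.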
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstanceConfigOperationsResponse() + pb_resp = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_config_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_config_operations_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_config_operations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstanceConfigs(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstanceConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstanceConfigsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstanceConfigsResponse: + r"""Call the list instance configs method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstanceConfigsRequest): + The request object. The request for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstanceConfigsResponse: + The response for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_configs(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigs", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstanceConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstanceConfigsResponse() + pb_resp = spanner_instance_admin.ListInstanceConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_configs_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstanceConfigsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_configs", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstancePartitionOperations(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstancePartitionOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: 
spanner_instance_admin.ListInstancePartitionOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: + r"""Call the list instance partition + operations method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstancePartitionOperationsRequest): + The request object. The request for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstancePartitionOperationsResponse: + The response for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_partition_operations(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitionOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitionOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstancePartitionOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
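+ # Note: the interceptor hooks applied below run in a fixed order:
+ # post_list_instance_partition_operations() first, then
+ # post_list_instance_partition_operations_with_metadata(), which also
+ # receives the response headers; either one may return a substitute
+ # response object.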
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstancePartitionOperationsResponse() + pb_resp = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_partition_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_partition_operations_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partition_operations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitionOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstancePartitions(_BaseInstanceAdminRestTransport._BaseListInstancePartitions, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstancePartitions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstancePartitionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstancePartitionsResponse: + r"""Call the list instance partitions method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstancePartitionsRequest): + The request object. The request for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstancePartitionsResponse: + The response for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_partitions(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstancePartitions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstancePartitionsResponse() + pb_resp = spanner_instance_admin.ListInstancePartitionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_partitions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_partitions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstancePartitionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partitions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstances(_BaseInstanceAdminRestTransport._BaseListInstances, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstances") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstancesRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstancesRequest): + The request object. The request for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstancesResponse: + The response for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstances._get_http_options() + + request, metadata = self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstances", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstances", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
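+ # For illustration (hypothetical values): ListInstances sends no body, so
+ # list options travel as query params; a request with page_size=5 and
+ # filter="env:dev" is flattened into the GET URL roughly as
+ # ?pageSize=5&filter=env%3Adev.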
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstancesResponse() + pb_resp = spanner_instance_admin.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstancesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instances", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _MoveInstance(_BaseInstanceAdminRestTransport._BaseMoveInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.MoveInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.MoveInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the move instance method over HTTP. + + Args: + request (~.spanner_instance_admin.MoveInstanceRequest): + The request object. The request for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_http_options() + + request, metadata = self._interceptor.pre_move_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.MoveInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "MoveInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._MoveInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_move_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.move_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "MoveInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy(_BaseInstanceAdminRestTransport._BaseSetIamPolicy, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation <https://cloud.google.com/iam/docs/>`__.
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.SetIamPolicy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.set_iam_policy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions(_BaseInstanceAdminRestTransport._BaseTestIamPermissions, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.TestIamPermissions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
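+ # A usage sketch (hypothetical names): callers normally reach this path
+ # through the client surface rather than the transport, e.g.
+ #
+ #   client = InstanceAdminClient(transport="rest")
+ #   resp = client.test_iam_permissions(request={
+ #       "resource": instance_name,
+ #       "permissions": ["spanner.instances.get"],
+ #   })
+ #
+ # which funnels into this __call__ via the wrapped client method.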
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.test_iam_permissions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInstance(_BaseInstanceAdminRestTransport._BaseUpdateInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.UpdateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.spanner_instance_admin.UpdateInstanceRequest): + The request object. The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_http_options() + + request, metadata = self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInstanceConfig(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.UpdateInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.UpdateInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.UpdateInstanceConfigRequest): + The request object. The request for + [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_update_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._UpdateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
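+            # core_exceptions.from_http_response builds the matching
+            # GoogleAPICallError subclass from the status code and the JSON
+            # error payload when one is present (e.g. NotFound for a 404,
+            # Conflict for a 409); this is api_core behavior, not anything
+            # Spanner-specific.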
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_instance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_config", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstanceConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInstancePartition(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.UpdateInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.UpdateInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.UpdateInstancePartitionRequest): + The request object. The request for + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_update_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._UpdateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_instance_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_partition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstancePartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_instance(self) -> Callable[ + [spanner_instance_admin.CreateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance_partition(self) -> Callable[ + [spanner_instance_admin.CreateInstancePartitionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance_config(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceConfigRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance_partition(self) -> Callable[ + [spanner_instance_admin.DeleteInstancePartitionRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + spanner_instance_admin.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + spanner_instance_admin.InstanceConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance_partition(self) -> Callable[ + [spanner_instance_admin.GetInstancePartitionRequest], + spanner_instance_admin.InstancePartition]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_config_operations(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigOperationsRequest], + spanner_instance_admin.ListInstanceConfigOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInstanceConfigOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + spanner_instance_admin.ListInstanceConfigsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_partition_operations(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionOperationsRequest], + spanner_instance_admin.ListInstancePartitionOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstancePartitionOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + spanner_instance_admin.ListInstancePartitionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstancePartitions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + spanner_instance_admin.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def move_instance(self) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._MoveInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance_config(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceConfigRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance_partition(self) -> Callable[ + [spanner_instance_admin.UpdateInstancePartitionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseInstanceAdminRestTransport._BaseCancelOperation, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
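+
+            Cancellation is best-effort: the operation may still complete
+            despite a successful cancel, so callers typically re-check it
+            with ``GetOperation`` afterwards (standard ``google.longrunning``
+            semantics).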
+ """ + + http_options = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CancelOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseInstanceAdminRestTransport._BaseDeleteOperation, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseInstanceAdminRestTransport._BaseGetOperation, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminAsyncClient.GetOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseInstanceAdminRestTransport._BaseListOperations, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
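+            # The ListOperationsResponse parsed below is a paged result:
+            # callers feed response.next_page_token back as
+            # request.page_token until it comes back empty (standard
+            # google.longrunning paging).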
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminAsyncClient.ListOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InstanceAdminRestTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py new file mode 100644 index 0000000000..ef9a29eaf8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py @@ -0,0 +1,1152 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseInstanceAdminRestTransport(InstanceAdminTransport): + """Base REST backend transport for InstanceAdmin. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
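+
+    Each RPC is described by a nested ``_Base<Rpc>`` helper that carries its
+    HTTP rule (method, URI template and body) and splits a request message
+    into path, body and query-string parts, roughly:
+
+    .. code-block:: python
+
+        # A sketch of the per-RPC flow, given a GetInstanceRequest `request`:
+        opts = _BaseInstanceAdminRestTransport._BaseGetInstance._get_http_options()
+        transcoded = _BaseInstanceAdminRestTransport._BaseGetInstance._get_transcoded_request(opts, request)
+        params = _BaseInstanceAdminRestTransport._BaseGetInstance._get_query_params_json(transcoded)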
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseCreateInstance:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{parent=projects/*}/instances',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = spanner_instance_admin.CreateInstanceRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstance._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseCreateInstanceConfig:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in
message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*}/instanceConfigs', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.CreateInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.CreateInstancePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInstanceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.DeleteInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.DeleteInstancePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*}:getIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + 
transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstanceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.GetInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.GetInstancePartitionRequest.pb(request) + 
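+            # path_template.transcode matches the request against the HTTP
+            # rule above; e.g. a request whose name is the (hypothetical)
+            # "projects/p/instances/i/instancePartitions/part" transcodes to
+            # method="get" with
+            # uri="/v1/projects/p/instances/i/instancePartitions/part", and
+            # any remaining set fields are routed into query_params.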
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstanceConfigOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*}/instanceConfigOperations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstanceConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*}/instanceConfigs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstanceConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstancePartitionOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitionOperations', + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstancePartitions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstancePartitionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstances: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*}/instances', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseMoveInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v1/{name=projects/*/instances/*}:move', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.MoveInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseMoveInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*}:setIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*}:testIamPermissions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
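+            # The "$alt" system parameter set below asks the server for JSON
+            # responses with enums encoded as integers, mirroring
+            # use_integers_for_enums=True on the serialization side.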
query_params.update(_BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance.name=projects/*/instances/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstanceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance_config.name=projects/*/instanceConfigs/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.UpdateInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': 
'/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.UpdateInstancePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseInstanceAdminRestTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py new file mode 100644 index 0000000000..385c06fb99 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
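Before the types package begins, note how the ``_Base*`` classes that close ``rest_base.py`` fit together: ``_get_http_options()`` lists the candidate HTTP bindings for one RPC, ``_get_transcoded_request()`` matches a request against those bindings, and ``_get_query_params_json()`` (plus ``_get_request_body_json()`` for bindings that declare a ``body``) splits the transcoded request into query string and payload. The following is a minimal sketch of how a concrete transport might compose them for ``MoveInstance``; the helper names come from the generated file, while ``session`` (an authorized requests-style session) and the host handling are illustrative assumptions, not generated code.

    # Minimal sketch, not part of the generated patch. Assumes
    # _BaseInstanceAdminRestTransport from rest_base.py is in scope and
    # `session` behaves like an authorized requests.Session.
    def sketch_move_instance(session, host, request):
        base = _BaseInstanceAdminRestTransport._BaseMoveInstance
        http_options = base._get_http_options()
        # Match the request against the declared binding, here
        # 'post /v1/{name=projects/*/instances/*}:move'.
        transcoded = base._get_transcoded_request(http_options, request)
        body = base._get_request_body_json(transcoded)
        query_params = base._get_query_params_json(transcoded)
        method = transcoded['method']   # 'post'
        uri = transcoded['uri']         # e.g. '/v1/projects/p/instances/i:move'
        return getattr(session, method)(
            f"https://{host}{uri}", params=query_params, data=body)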
+# +from .common import ( + OperationProgress, + ReplicaSelection, + FulfillmentPeriod, +) +from .spanner_instance_admin import ( + AutoscalingConfig, + CreateInstanceConfigMetadata, + CreateInstanceConfigRequest, + CreateInstanceMetadata, + CreateInstancePartitionMetadata, + CreateInstancePartitionRequest, + CreateInstanceRequest, + DeleteInstanceConfigRequest, + DeleteInstancePartitionRequest, + DeleteInstanceRequest, + FreeInstanceMetadata, + GetInstanceConfigRequest, + GetInstancePartitionRequest, + GetInstanceRequest, + Instance, + InstanceConfig, + InstancePartition, + ListInstanceConfigOperationsRequest, + ListInstanceConfigOperationsResponse, + ListInstanceConfigsRequest, + ListInstanceConfigsResponse, + ListInstancePartitionOperationsRequest, + ListInstancePartitionOperationsResponse, + ListInstancePartitionsRequest, + ListInstancePartitionsResponse, + ListInstancesRequest, + ListInstancesResponse, + MoveInstanceMetadata, + MoveInstanceRequest, + MoveInstanceResponse, + ReplicaComputeCapacity, + ReplicaInfo, + UpdateInstanceConfigMetadata, + UpdateInstanceConfigRequest, + UpdateInstanceMetadata, + UpdateInstancePartitionMetadata, + UpdateInstancePartitionRequest, + UpdateInstanceRequest, +) + +__all__ = ( + 'OperationProgress', + 'ReplicaSelection', + 'FulfillmentPeriod', + 'AutoscalingConfig', + 'CreateInstanceConfigMetadata', + 'CreateInstanceConfigRequest', + 'CreateInstanceMetadata', + 'CreateInstancePartitionMetadata', + 'CreateInstancePartitionRequest', + 'CreateInstanceRequest', + 'DeleteInstanceConfigRequest', + 'DeleteInstancePartitionRequest', + 'DeleteInstanceRequest', + 'FreeInstanceMetadata', + 'GetInstanceConfigRequest', + 'GetInstancePartitionRequest', + 'GetInstanceRequest', + 'Instance', + 'InstanceConfig', + 'InstancePartition', + 'ListInstanceConfigOperationsRequest', + 'ListInstanceConfigOperationsResponse', + 'ListInstanceConfigsRequest', + 'ListInstanceConfigsResponse', + 'ListInstancePartitionOperationsRequest', + 'ListInstancePartitionOperationsResponse', + 'ListInstancePartitionsRequest', + 'ListInstancePartitionsResponse', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'MoveInstanceMetadata', + 'MoveInstanceRequest', + 'MoveInstanceResponse', + 'ReplicaComputeCapacity', + 'ReplicaInfo', + 'UpdateInstanceConfigMetadata', + 'UpdateInstanceConfigRequest', + 'UpdateInstanceMetadata', + 'UpdateInstancePartitionMetadata', + 'UpdateInstancePartitionRequest', + 'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py new file mode 100644 index 0000000000..5bc1c6b158 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
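With the re-exports above in place, callers can reach every admin message through the single ``types`` module instead of its submodules. A short usage sketch follows; the resource names are placeholders, not values from this patch.

    # Illustrative only; project and instance IDs below are placeholders.
    from google.cloud.spanner_admin_instance_v1 import types

    request = types.CreateInstanceRequest(
        parent="projects/example-project",
        instance_id="example-instance",
        instance=types.Instance(
            # `name` may be omitted on create; see CreateInstanceRequest docs.
            config="projects/example-project/instanceConfigs/regional-us-central1",
            display_name="Example instance",
            node_count=1,
        ),
    )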
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.spanner.admin.instance.v1',
+    manifest={
+        'FulfillmentPeriod',
+        'OperationProgress',
+        'ReplicaSelection',
+    },
+)
+
+
+class FulfillmentPeriod(proto.Enum):
+    r"""Indicates the expected fulfillment period of an operation.
+
+    Values:
+        FULFILLMENT_PERIOD_UNSPECIFIED (0):
+            Not specified.
+        FULFILLMENT_PERIOD_NORMAL (1):
+            Normal fulfillment period. The operation is
+            expected to complete within minutes.
+        FULFILLMENT_PERIOD_EXTENDED (2):
+            Extended fulfillment period. It can take up
+            to an hour for the operation to complete.
+    """
+    FULFILLMENT_PERIOD_UNSPECIFIED = 0
+    FULFILLMENT_PERIOD_NORMAL = 1
+    FULFILLMENT_PERIOD_EXTENDED = 2
+
+
+class OperationProgress(proto.Message):
+    r"""Encapsulates progress-related information for a Cloud Spanner
+    long-running instance operation.
+
+    Attributes:
+        progress_percent (int):
+            Percent completion of the operation.
+            Values are between 0 and 100 inclusive.
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time the request was received.
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            If set, the time at which this operation
+            failed or was completed successfully.
+    """
+
+    progress_percent: int = proto.Field(
+        proto.INT32,
+        number=1,
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    end_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ReplicaSelection(proto.Message):
+    r"""ReplicaSelection identifies replicas with common properties.
+
+    Attributes:
+        location (str):
+            Required. Name of the location of the
+            replicas (e.g., "us-central1").
+    """
+
+    location: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
new file mode 100644
index 0000000000..b057141470
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
@@ -0,0 +1,2366 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
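``common.py`` above contributes the three shared pieces reused throughout this package: the ``FulfillmentPeriod`` enum, the ``OperationProgress`` message embedded in long-running operation metadata, and ``ReplicaSelection``. A brief sketch of constructing and reading them, before the main module body begins; the values are placeholders.

    # Illustrative only; these stand in for fields of the operation
    # metadata messages (CreateInstanceMetadata and friends) defined below.
    from google.cloud.spanner_admin_instance_v1.types import common

    selection = common.ReplicaSelection(location="us-central1")
    progress = common.OperationProgress(progress_percent=42)
    if progress.progress_percent < 100:
        print("operation still running; started at", progress.start_time)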
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_instance_v1.types import common +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.instance.v1', + manifest={ + 'ReplicaInfo', + 'InstanceConfig', + 'ReplicaComputeCapacity', + 'AutoscalingConfig', + 'Instance', + 'ListInstanceConfigsRequest', + 'ListInstanceConfigsResponse', + 'GetInstanceConfigRequest', + 'CreateInstanceConfigRequest', + 'UpdateInstanceConfigRequest', + 'DeleteInstanceConfigRequest', + 'ListInstanceConfigOperationsRequest', + 'ListInstanceConfigOperationsResponse', + 'GetInstanceRequest', + 'CreateInstanceRequest', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'UpdateInstanceRequest', + 'DeleteInstanceRequest', + 'CreateInstanceMetadata', + 'UpdateInstanceMetadata', + 'FreeInstanceMetadata', + 'CreateInstanceConfigMetadata', + 'UpdateInstanceConfigMetadata', + 'InstancePartition', + 'CreateInstancePartitionMetadata', + 'CreateInstancePartitionRequest', + 'DeleteInstancePartitionRequest', + 'GetInstancePartitionRequest', + 'UpdateInstancePartitionRequest', + 'UpdateInstancePartitionMetadata', + 'ListInstancePartitionsRequest', + 'ListInstancePartitionsResponse', + 'ListInstancePartitionOperationsRequest', + 'ListInstancePartitionOperationsResponse', + 'MoveInstanceRequest', + 'MoveInstanceResponse', + 'MoveInstanceMetadata', + }, +) + + +class ReplicaInfo(proto.Message): + r""" + + Attributes: + location (str): + The location of the serving resources, e.g., + "us-central1". + type_ (google.cloud.spanner_admin_instance_v1.types.ReplicaInfo.ReplicaType): + The type of replica. + default_leader_location (bool): + If true, this location is designated as the default leader + location where leader replicas are placed. See the `region + types + documentation `__ + for more details. + """ + class ReplicaType(proto.Enum): + r"""Indicates the type of replica. See the `replica types + documentation `__ + for more details. + + Values: + TYPE_UNSPECIFIED (0): + Not specified. + READ_WRITE (1): + Read-write replicas support both reads and writes. These + replicas: + + - Maintain a full copy of your data. + - Serve reads. + - Can vote whether to commit a write. + - Participate in leadership election. + - Are eligible to become a leader. + READ_ONLY (2): + Read-only replicas only support reads (not writes). + Read-only replicas: + + - Maintain a full copy of your data. + - Serve reads. + - Do not participate in voting to commit writes. + - Are not eligible to become a leader. + WITNESS (3): + Witness replicas don't support reads but do participate in + voting to commit writes. Witness replicas: + + - Do not maintain a full copy of data. + - Do not serve reads. + - Vote whether to commit writes. + - Participate in leader election but are not eligible to + become leader. + """ + TYPE_UNSPECIFIED = 0 + READ_WRITE = 1 + READ_ONLY = 2 + WITNESS = 3 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + type_: ReplicaType = proto.Field( + proto.ENUM, + number=2, + enum=ReplicaType, + ) + default_leader_location: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class InstanceConfig(proto.Message): + r"""A possible configuration for a Cloud Spanner instance. 
+    Configurations define the geographic placement of nodes and
+    their replication.
+
+    Attributes:
+        name (str):
+            A unique identifier for the instance configuration. Values
+            are of the form
+            ``projects//instanceConfigs/[a-z][-a-z0-9]*``.
+
+            User instance configurations must start with ``custom-``.
+        display_name (str):
+            The name of this instance configuration as it
+            appears in UIs.
+        config_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.Type):
+            Output only. Whether this instance
+            configuration is a Google-managed or
+            user-managed configuration.
+        replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]):
+            The geographic placement of nodes in this instance
+            configuration and their replication properties.
+
+            To create user-managed configurations, input ``replicas``
+            must include all replicas in ``replicas`` of the
+            ``base_config`` and include one or more replicas in the
+            ``optional_replicas`` of the ``base_config``.
+        optional_replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]):
+            Output only. The available optional replicas
+            to choose from for user-managed configurations.
+            Populated for Google-managed configurations.
+        base_config (str):
+            Base configuration name, e.g.
+            projects//instanceConfigs/nam3, based on which
+            this configuration is created. Only set for user-managed
+            configurations. ``base_config`` must refer to a
+            configuration of type ``GOOGLE_MANAGED`` in the same project
+            as this configuration.
+        labels (MutableMapping[str, str]):
+            Cloud Labels are a flexible and lightweight mechanism for
+            organizing cloud resources into groups that reflect a
+            customer's organizational needs and deployment strategies.
+            Cloud Labels can be used to filter collections of resources.
+            They can be used to control how resource metrics are
+            aggregated. And they can be used as arguments to policy
+            management rules (e.g. route, firewall, load balancing,
+            etc.).
+
+            - Label keys must be between 1 and 63 characters long and
+              must conform to the following regular expression:
+              ``[a-z][a-z0-9_-]{0,62}``.
+            - Label values must be between 0 and 63 characters long and
+              must conform to the regular expression
+              ``[a-z0-9_-]{0,63}``.
+            - No more than 64 labels can be associated with a given
+              resource.
+
+            See https://goo.gl/xmQnxf for more information on and
+            examples of labels.
+
+            If you plan to use labels in your own code, please note that
+            additional characters may be allowed in the future.
+            Therefore, you are advised to use an internal label
+            representation, such as JSON, which doesn't rely upon
+            specific characters being disallowed. For example,
+            representing labels as the string: name + "*" + value would
+            prove problematic if we were to allow "*" in a future
+            release.
+        etag (str):
+            etag is used for optimistic concurrency
+            control as a way to help prevent simultaneous
+            updates of an instance configuration from
+            overwriting each other. It is strongly suggested
+            that systems make use of the etag in the
+            read-modify-write cycle to perform instance
+            configuration updates in order to avoid race
+            conditions: An etag is returned in the response
+            which contains instance configurations, and
+            systems are expected to put that etag in the
+            request to update the instance configuration to
+            ensure that their change is applied to the same
+            version of the instance configuration.
If no + etag is provided in the call to update the + instance configuration, then the existing + instance configuration is overwritten blindly. + leader_options (MutableSequence[str]): + Allowed values of the "default_leader" schema option for + databases in instances that use this instance configuration. + reconciling (bool): + Output only. If true, the instance + configuration is being created or updated. If + false, there are no ongoing operations for the + instance configuration. + state (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.State): + Output only. The current instance configuration state. + Applicable only for ``USER_MANAGED`` configurations. + free_instance_availability (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.FreeInstanceAvailability): + Output only. Describes whether free instances + are available to be created in this instance + configuration. + quorum_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.QuorumType): + Output only. The ``QuorumType`` of the instance + configuration. + storage_limit_per_processing_unit (int): + Output only. The storage limit in bytes per + processing unit. + """ + class Type(proto.Enum): + r"""The type of this configuration. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified. + GOOGLE_MANAGED (1): + Google-managed configuration. + USER_MANAGED (2): + User-managed configuration. + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_MANAGED = 1 + USER_MANAGED = 2 + + class State(proto.Enum): + r"""Indicates the current state of the instance configuration. + + Values: + STATE_UNSPECIFIED (0): + Not specified. + CREATING (1): + The instance configuration is still being + created. + READY (2): + The instance configuration is fully created + and ready to be used to create instances. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + class FreeInstanceAvailability(proto.Enum): + r"""Describes the availability for free instances to be created + in an instance configuration. + + Values: + FREE_INSTANCE_AVAILABILITY_UNSPECIFIED (0): + Not specified. + AVAILABLE (1): + Indicates that free instances are available + to be created in this instance configuration. + UNSUPPORTED (2): + Indicates that free instances are not + supported in this instance configuration. + DISABLED (3): + Indicates that free instances are currently + not available to be created in this instance + configuration. + QUOTA_EXCEEDED (4): + Indicates that additional free instances + cannot be created in this instance configuration + because the project has reached its limit of + free instances. + """ + FREE_INSTANCE_AVAILABILITY_UNSPECIFIED = 0 + AVAILABLE = 1 + UNSUPPORTED = 2 + DISABLED = 3 + QUOTA_EXCEEDED = 4 + + class QuorumType(proto.Enum): + r"""Indicates the quorum type of this instance configuration. + + Values: + QUORUM_TYPE_UNSPECIFIED (0): + Quorum type not specified. + REGION (1): + An instance configuration tagged with ``REGION`` quorum type + forms a write quorum in a single region. + DUAL_REGION (2): + An instance configuration tagged with the ``DUAL_REGION`` + quorum type forms a write quorum with exactly two read-write + regions in a multi-region configuration. + + This instance configuration requires failover in the event + of regional failures. + MULTI_REGION (3): + An instance configuration tagged with the ``MULTI_REGION`` + quorum type forms a write quorum from replicas that are + spread across more than one region in a multi-region + configuration. 
+ """ + QUORUM_TYPE_UNSPECIFIED = 0 + REGION = 1 + DUAL_REGION = 2 + MULTI_REGION = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + config_type: Type = proto.Field( + proto.ENUM, + number=5, + enum=Type, + ) + replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ReplicaInfo', + ) + optional_replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='ReplicaInfo', + ) + base_config: str = proto.Field( + proto.STRING, + number=7, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + leader_options: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=10, + ) + state: State = proto.Field( + proto.ENUM, + number=11, + enum=State, + ) + free_instance_availability: FreeInstanceAvailability = proto.Field( + proto.ENUM, + number=12, + enum=FreeInstanceAvailability, + ) + quorum_type: QuorumType = proto.Field( + proto.ENUM, + number=18, + enum=QuorumType, + ) + storage_limit_per_processing_unit: int = proto.Field( + proto.INT64, + number=19, + ) + + +class ReplicaComputeCapacity(proto.Message): + r"""ReplicaComputeCapacity describes the amount of server + resources that are allocated to each replica identified by the + replica selection. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection): + Required. Identifies replicas by specified + properties. All replicas in the selection have + the same amount of compute capacity. + node_count (int): + The number of nodes allocated to each replica. + + This may be zero in API responses for instances that are not + yet in state ``READY``. + + This field is a member of `oneof`_ ``compute_capacity``. + processing_units (int): + The number of processing units allocated to each replica. + + This may be zero in API responses for instances that are not + yet in state ``READY``. + + This field is a member of `oneof`_ ``compute_capacity``. + """ + + replica_selection: common.ReplicaSelection = proto.Field( + proto.MESSAGE, + number=1, + message=common.ReplicaSelection, + ) + node_count: int = proto.Field( + proto.INT32, + number=2, + oneof='compute_capacity', + ) + processing_units: int = proto.Field( + proto.INT32, + number=3, + oneof='compute_capacity', + ) + + +class AutoscalingConfig(proto.Message): + r"""Autoscaling configuration for an instance. + + Attributes: + autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): + Required. Autoscaling limits for an instance. + autoscaling_targets (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingTargets): + Required. The autoscaling targets for an + instance. + asymmetric_autoscaling_options (MutableSequence[google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption]): + Optional. Optional asymmetric autoscaling + options. 
Replicas matching the replica selection
+            criteria will be autoscaled independently from
+            other replicas. The autoscaler will scale the
+            replicas based on the utilization of replicas
+            identified by the replica selection. Replica
+            selections should not overlap with each other.
+
+            Other replicas (those that do not match any
+            replica selection) will be autoscaled together
+            and will have the same compute capacity
+            allocated to them.
+    """
+
+    class AutoscalingLimits(proto.Message):
+        r"""The autoscaling limits for the instance. Users can define the
+        minimum and maximum compute capacity allocated to the instance, and
+        the autoscaler will only scale within that range. Users can use
+        either nodes or processing units to specify the limits, but should
+        use the same unit to set both the min_limit and max_limit.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            min_nodes (int):
+                Minimum number of nodes allocated to the
+                instance. If set, this number should be greater
+                than or equal to 1.
+
+                This field is a member of `oneof`_ ``min_limit``.
+            min_processing_units (int):
+                Minimum number of processing units allocated
+                to the instance. If set, this number should be a
+                multiple of 1000.
+
+                This field is a member of `oneof`_ ``min_limit``.
+            max_nodes (int):
+                Maximum number of nodes allocated to the instance. If set,
+                this number should be greater than or equal to min_nodes.
+
+                This field is a member of `oneof`_ ``max_limit``.
+            max_processing_units (int):
+                Maximum number of processing units allocated to the
+                instance. If set, this number should be a multiple of 1000
+                and be greater than or equal to min_processing_units.
+
+                This field is a member of `oneof`_ ``max_limit``.
+        """
+
+        min_nodes: int = proto.Field(
+            proto.INT32,
+            number=1,
+            oneof='min_limit',
+        )
+        min_processing_units: int = proto.Field(
+            proto.INT32,
+            number=2,
+            oneof='min_limit',
+        )
+        max_nodes: int = proto.Field(
+            proto.INT32,
+            number=3,
+            oneof='max_limit',
+        )
+        max_processing_units: int = proto.Field(
+            proto.INT32,
+            number=4,
+            oneof='max_limit',
+        )
+
+    class AutoscalingTargets(proto.Message):
+        r"""The autoscaling targets for an instance.
+
+        Attributes:
+            high_priority_cpu_utilization_percent (int):
+                Required. The target high-priority CPU utilization
+                percentage that the autoscaler should be trying to achieve
+                for the instance. This number is on a scale from 0 (no
+                utilization) to 100 (full utilization). The valid range is
+                [10, 90] inclusive.
+            storage_utilization_percent (int):
+                Required. The target storage utilization percentage that the
+                autoscaler should be trying to achieve for the instance.
+                This number is on a scale from 0 (no utilization) to 100
+                (full utilization). The valid range is [10, 99] inclusive.
+        """
+
+        high_priority_cpu_utilization_percent: int = proto.Field(
+            proto.INT32,
+            number=1,
+        )
+        storage_utilization_percent: int = proto.Field(
+            proto.INT32,
+            number=2,
+        )
+
+    class AsymmetricAutoscalingOption(proto.Message):
+        r"""AsymmetricAutoscalingOption specifies the scaling of replicas
+        identified by the given selection.
+
+        Attributes:
+            replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection):
+                Required.
Selects the replicas to which this + AsymmetricAutoscalingOption applies. Only + read-only replicas are supported. + overrides (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides): + Optional. Overrides applied to the top-level + autoscaling configuration for the selected + replicas. + """ + + class AutoscalingConfigOverrides(proto.Message): + r"""Overrides the top-level autoscaling configuration for the replicas + identified by ``replica_selection``. All fields in this message are + optional. Any unspecified fields will use the corresponding values + from the top-level autoscaling configuration. + + Attributes: + autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): + Optional. If specified, overrides the min/max + limit in the top-level autoscaling configuration + for the selected replicas. + autoscaling_target_high_priority_cpu_utilization_percent (int): + Optional. If specified, overrides the autoscaling target + high_priority_cpu_utilization_percent in the top-level + autoscaling configuration for the selected replicas. + """ + + autoscaling_limits: 'AutoscalingConfig.AutoscalingLimits' = proto.Field( + proto.MESSAGE, + number=1, + message='AutoscalingConfig.AutoscalingLimits', + ) + autoscaling_target_high_priority_cpu_utilization_percent: int = proto.Field( + proto.INT32, + number=2, + ) + + replica_selection: common.ReplicaSelection = proto.Field( + proto.MESSAGE, + number=1, + message=common.ReplicaSelection, + ) + overrides: 'AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides' = proto.Field( + proto.MESSAGE, + number=2, + message='AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides', + ) + + autoscaling_limits: AutoscalingLimits = proto.Field( + proto.MESSAGE, + number=1, + message=AutoscalingLimits, + ) + autoscaling_targets: AutoscalingTargets = proto.Field( + proto.MESSAGE, + number=2, + message=AutoscalingTargets, + ) + asymmetric_autoscaling_options: MutableSequence[AsymmetricAutoscalingOption] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=AsymmetricAutoscalingOption, + ) + + +class Instance(proto.Message): + r"""An isolated set of Cloud Spanner resources on which databases + can be hosted. + + Attributes: + name (str): + Required. A unique identifier for the instance, which cannot + be changed after the instance is created. Values are of the + form + ``projects//instances/[a-z][-a-z0-9]*[a-z0-9]``. + The final segment of the name must be between 2 and 64 + characters in length. + config (str): + Required. The name of the instance's configuration. Values + are of the form + ``projects//instanceConfigs/``. See + also + [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] + and + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + display_name (str): + Required. The descriptive name for this + instance as it appears in UIs. Must be unique + per project and between 4 and 30 characters in + length. + node_count (int): + The number of nodes allocated to this instance. At most, one + of either ``node_count`` or ``processing_units`` should be + present in the message. + + Users can set the ``node_count`` field to specify the target + number of nodes allocated to the instance. + + If autoscaling is enabled, ``node_count`` is treated as an + ``OUTPUT_ONLY`` field and reflects the current number of + nodes allocated to the instance. 
+ + This might be zero in API responses for instances that are + not yet in the ``READY`` state. + + For more information, see `Compute capacity, nodes, and + processing + units `__. + processing_units (int): + The number of processing units allocated to this instance. + At most, one of either ``processing_units`` or + ``node_count`` should be present in the message. + + Users can set the ``processing_units`` field to specify the + target number of processing units allocated to the instance. + + If autoscaling is enabled, ``processing_units`` is treated + as an ``OUTPUT_ONLY`` field and reflects the current number + of processing units allocated to the instance. + + This might be zero in API responses for instances that are + not yet in the ``READY`` state. + + For more information, see `Compute capacity, nodes and + processing + units `__. + replica_compute_capacity (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaComputeCapacity]): + Output only. Lists the compute capacity per + ReplicaSelection. A replica selection identifies + a set of replicas with common properties. + Replicas identified by a ReplicaSelection are + scaled with the same compute capacity. + autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig): + Optional. The autoscaling configuration. Autoscaling is + enabled if this field is set. When autoscaling is enabled, + node_count and processing_units are treated as OUTPUT_ONLY + fields and reflect the current compute capacity allocated to + the instance. + state (google.cloud.spanner_admin_instance_v1.types.Instance.State): + Output only. The current instance state. For + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance], + the state must be either omitted or set to ``CREATING``. For + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance], + the state must be either omitted or set to ``READY``. + labels (MutableMapping[str, str]): + Cloud Labels are a flexible and lightweight mechanism for + organizing cloud resources into groups that reflect a + customer's organizational needs and deployment strategies. + Cloud Labels can be used to filter collections of resources. + They can be used to control how resource metrics are + aggregated. And they can be used as arguments to policy + management rules (e.g. route, firewall, load balancing, + etc.). + + - Label keys must be between 1 and 63 characters long and + must conform to the following regular expression: + ``[a-z][a-z0-9_-]{0,62}``. + - Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``[a-z0-9_-]{0,63}``. + - No more than 64 labels can be associated with a given + resource. + + See https://goo.gl/xmQnxf for more information on and + examples of labels. + + If you plan to use labels in your own code, please note that + additional characters may be allowed in the future. And so + you are advised to use an internal label representation, + such as JSON, which doesn't rely upon specific characters + being disallowed. For example, representing labels as the + string: name + "*" + value would prove problematic if we + were to allow "*" in a future release. + instance_type (google.cloud.spanner_admin_instance_v1.types.Instance.InstanceType): + The ``InstanceType`` of the current instance. + endpoint_uris (MutableSequence[str]): + Deprecated. This field is not populated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time at which the instance + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the instance + was most recently updated. + free_instance_metadata (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata): + Free instance metadata. Only populated for + free instances. + edition (google.cloud.spanner_admin_instance_v1.types.Instance.Edition): + Optional. The ``Edition`` of the current instance. + default_backup_schedule_type (google.cloud.spanner_admin_instance_v1.types.Instance.DefaultBackupScheduleType): + Optional. Controls the default backup schedule behavior for + new databases within the instance. By default, a backup + schedule is created automatically when a new database is + created in a new instance. + + Note that the ``AUTOMATIC`` value isn't permitted for free + instances, as backups and backup schedules aren't supported + for free instances. + + In the ``GetInstance`` or ``ListInstances`` response, if the + value of ``default_backup_schedule_type`` isn't set, or set + to ``NONE``, Spanner doesn't create a default backup + schedule for new databases in the instance. + """ + class State(proto.Enum): + r"""Indicates the current state of the instance. + + Values: + STATE_UNSPECIFIED (0): + Not specified. + CREATING (1): + The instance is still being created. + Resources may not be available yet, and + operations such as database creation may not + work. + READY (2): + The instance is fully created and ready to do + work such as creating databases. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + class InstanceType(proto.Enum): + r"""The type of this instance. The type can be used to distinguish + product variants, that can affect aspects like: usage restrictions, + quotas and billing. Currently this is used to distinguish + FREE_INSTANCE vs PROVISIONED instances. + + Values: + INSTANCE_TYPE_UNSPECIFIED (0): + Not specified. + PROVISIONED (1): + Provisioned instances have dedicated + resources, standard usage limits and support. + FREE_INSTANCE (2): + Free instances provide no guarantee for dedicated resources, + [node_count, processing_units] should be 0. They come with + stricter usage limits and limited support. + """ + INSTANCE_TYPE_UNSPECIFIED = 0 + PROVISIONED = 1 + FREE_INSTANCE = 2 + + class Edition(proto.Enum): + r"""The edition selected for this instance. Different editions + provide different capabilities at different price points. + + Values: + EDITION_UNSPECIFIED (0): + Edition not specified. + STANDARD (1): + Standard edition. + ENTERPRISE (2): + Enterprise edition. + ENTERPRISE_PLUS (3): + Enterprise Plus edition. + """ + EDITION_UNSPECIFIED = 0 + STANDARD = 1 + ENTERPRISE = 2 + ENTERPRISE_PLUS = 3 + + class DefaultBackupScheduleType(proto.Enum): + r"""Indicates the `default backup + schedule `__ + behavior for new databases within the instance. + + Values: + DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED (0): + Not specified. + NONE (1): + A default backup schedule isn't created + automatically when a new database is created in + the instance. + AUTOMATIC (2): + A default backup schedule is created + automatically when a new database is created in + the instance. The default backup schedule + creates a full backup every 24 hours. These full + backups are retained for 7 days. You can edit or + delete the default backup schedule once it's + created. 
+ """ + DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED = 0 + NONE = 1 + AUTOMATIC = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + node_count: int = proto.Field( + proto.INT32, + number=5, + ) + processing_units: int = proto.Field( + proto.INT32, + number=9, + ) + replica_compute_capacity: MutableSequence['ReplicaComputeCapacity'] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message='ReplicaComputeCapacity', + ) + autoscaling_config: 'AutoscalingConfig' = proto.Field( + proto.MESSAGE, + number=17, + message='AutoscalingConfig', + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + instance_type: InstanceType = proto.Field( + proto.ENUM, + number=10, + enum=InstanceType, + ) + endpoint_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + free_instance_metadata: 'FreeInstanceMetadata' = proto.Field( + proto.MESSAGE, + number=13, + message='FreeInstanceMetadata', + ) + edition: Edition = proto.Field( + proto.ENUM, + number=20, + enum=Edition, + ) + default_backup_schedule_type: DefaultBackupScheduleType = proto.Field( + proto.ENUM, + number=23, + enum=DefaultBackupScheduleType, + ) + + +class ListInstanceConfigsRequest(proto.Message): + r"""The request for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + + Attributes: + parent (str): + Required. The name of the project for which a list of + supported instance configurations is requested. Values are + of the form ``projects/``. + page_size (int): + Number of instance configurations to be + returned in the response. If 0 or less, defaults + to the server's maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigsResponse.next_page_token] + from a previous + [ListInstanceConfigsResponse][google.spanner.admin.instance.v1.ListInstanceConfigsResponse]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInstanceConfigsResponse(proto.Message): + r"""The response for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + + Attributes: + instance_configs (MutableSequence[google.cloud.spanner_admin_instance_v1.types.InstanceConfig]): + The list of requested instance + configurations. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs] + call to fetch more of the matching instance configurations. 
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    instance_configs: MutableSequence['InstanceConfig'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='InstanceConfig',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GetInstanceConfigRequest(proto.Message):
+    r"""The request for
+    [GetInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].
+
+    Attributes:
+        name (str):
+            Required. The name of the requested instance configuration.
+            Values are of the form
+            ``projects//instanceConfigs/``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreateInstanceConfigRequest(proto.Message):
+    r"""The request for
+    [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
+
+    Attributes:
+        parent (str):
+            Required. The name of the project in which to create the
+            instance configuration. Values are of the form
+            ``projects/``.
+        instance_config_id (str):
+            Required. The ID of the instance configuration to create.
+            Valid identifiers are of the form
+            ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and 64
+            characters in length. The ``custom-`` prefix is required to
+            avoid name conflicts with Google-managed configurations.
+        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+            Required. The ``InstanceConfig`` proto of the configuration
+            to create. ``instance_config.name`` must be
+            ``/instanceConfigs/``.
+            ``instance_config.base_config`` must be a Google-managed
+            configuration name, e.g. /instanceConfigs/us-east1,
+            /instanceConfigs/nam3.
+        validate_only (bool):
+            An option to validate, but not actually
+            execute, a request, and provide the same
+            response.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    instance_config_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    instance_config: 'InstanceConfig' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='InstanceConfig',
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+
+
+class UpdateInstanceConfigRequest(proto.Message):
+    r"""The request for
+    [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
+
+    Attributes:
+        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+            Required. The user instance configuration to update, which
+            must always include the instance configuration name.
+            Otherwise, only fields mentioned in
+            [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
+            need be included. To prevent conflicts of concurrent
+            updates,
+            [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
+            can be used.
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. A mask specifying which fields in
+            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+            should be updated. The field mask must always be specified;
+            this prevents any future fields in
+            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+            from being erased accidentally by clients that do not know
+            about them. Only display_name and labels can be updated.
+        validate_only (bool):
+            An option to validate, but not actually
+            execute, a request, and provide the same
+            response.
+ """ + + instance_config: 'InstanceConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='InstanceConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteInstanceConfigRequest(proto.Message): + r"""The request for + [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. + + Attributes: + name (str): + Required. The name of the instance configuration to be + deleted. Values are of the form + ``projects//instanceConfigs/`` + etag (str): + Used for optimistic concurrency control as a + way to help prevent simultaneous deletes of an + instance configuration from overwriting each + other. If not empty, the API + only deletes the instance configuration when the + etag provided matches the current status of the + requested instance configuration. Otherwise, + deletes the instance configuration without + checking the current status of the requested + instance configuration. + validate_only (bool): + An option to validate, but not actually + execute, a request, and provide the same + response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListInstanceConfigOperationsRequest(proto.Message): + r"""The request for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + + Attributes: + parent (str): + Required. The project of the instance configuration + operations. Values are of the form ``projects/``. + filter (str): + An expression that filters the list of returned operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the Operation are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. For example, + the type string for + [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata] + is + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first, if filtering + on metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic. However, you can specify AND, OR, + and NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. 
+ - ``(metadata.@type=`` + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata) AND`` + ``(metadata.instance_config.name:custom-config) AND`` + ``(metadata.progress.start_time < \"2021-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Return operations where: + + - The operation's metadata type is + [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. + - The instance configuration name contains + "custom-config". + - The operation started before 2021-03-28T14:50:00Z. + - The operation resulted in an error. + page_size (int): + Number of operations to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse.next_page_token] + from a previous + [ListInstanceConfigOperationsResponse][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListInstanceConfigOperationsResponse(proto.Message): + r"""The response for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + + Attributes: + operations (MutableSequence[google.longrunning.operations_pb2.Operation]): + The list of matching instance configuration long-running + operations. Each operation's name will be prefixed by the + name of the instance configuration. The operation's metadata + field type ``metadata.type_url`` describes the type of the + metadata. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations] + call to fetch more of the matching metadata. + """ + + @property + def raw_page(self): + return self + + operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInstanceRequest(proto.Message): + r"""The request for + [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. + + Attributes: + name (str): + Required. The name of the requested instance. Values are of + the form ``projects//instances/``. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + If field_mask is present, specifies the subset of + [Instance][google.spanner.admin.instance.v1.Instance] fields + that should be returned. If absent, all + [Instance][google.spanner.admin.instance.v1.Instance] fields + are returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + field_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class CreateInstanceRequest(proto.Message): + r"""The request for + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + + Attributes: + parent (str): + Required. The name of the project in which to create the + instance. Values are of the form ``projects/``. + instance_id (str): + Required. The ID of the instance to create. 
+
+
+class CreateInstanceRequest(proto.Message):
+    r"""The request for
+    [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].
+
+    Attributes:
+        parent (str):
+            Required. The name of the project in which to create the
+            instance. Values are of the form ``projects/<project>``.
+        instance_id (str):
+            Required. The ID of the instance to create. Valid
+            identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and
+            must be between 2 and 64 characters in length.
+        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
+            Required. The instance to create. The name may be omitted,
+            but if specified must be
+            ``<parent>/instances/<instance_id>``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    instance_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    instance: 'Instance' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='Instance',
+    )
+
+
+class ListInstancesRequest(proto.Message):
+    r"""The request for
+    [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+
+    Attributes:
+        parent (str):
+            Required. The name of the project for which a list of
+            instances is requested. Values are of the form
+            ``projects/<project>``.
+        page_size (int):
+            Number of instances to be returned in the
+            response. If 0 or less, defaults to the server's
+            maximum allowed page size.
+        page_token (str):
+            If non-empty, ``page_token`` should contain a
+            [next_page_token][google.spanner.admin.instance.v1.ListInstancesResponse.next_page_token]
+            from a previous
+            [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse].
+        filter (str):
+            An expression for filtering the results of the request.
+            Filter rules are case insensitive. The fields eligible for
+            filtering are:
+
+            -  ``name``
+            -  ``display_name``
+            -  ``labels.key`` where key is the name of a label
+
+            Some examples of using filters are:
+
+            -  ``name:*`` --> The instance has a name.
+            -  ``name:Howl`` --> The instance's name contains the
+               string "howl".
+            -  ``name:HOWL`` --> Equivalent to above.
+            -  ``NAME:howl`` --> Equivalent to above.
+            -  ``labels.env:*`` --> The instance has the label "env".
+            -  ``labels.env:dev`` --> The instance has the label "env"
+               and the value of the label contains the string "dev".
+            -  ``name:howl labels.env:dev`` --> The instance's name
+               contains "howl" and it has the label "env" with its
+               value containing "dev".
+        instance_deadline (google.protobuf.timestamp_pb2.Timestamp):
+            Deadline used while retrieving metadata for instances.
+            Instances whose metadata cannot be retrieved within this
+            deadline will be added to
+            [unreachable][google.spanner.admin.instance.v1.ListInstancesResponse.unreachable]
+            in
+            [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse].
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    instance_deadline: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
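+
+
+# NOTE: Illustrative usage only; not part of the generated module. A minimal
+# sketch of a label-filtered listing, mirroring the filter examples in the
+# docstring above (placeholder project name):
+#
+#     request = spanner_admin_instance_v1.ListInstancesRequest(
+#         parent="projects/my-project",
+#         filter="labels.env:dev",
+#     )
+#     for instance in client.list_instances(request=request):
+#         print(instance.name)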
+ """ + + @property + def raw_page(self): + return self + + instances: MutableSequence['Instance'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + + Attributes: + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + Required. The instance to update, which must always include + the instance name. Otherwise, only fields mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] + need be included. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [Instance][google.spanner.admin.instance.v1.Instance] should + be updated. The field mask must always be specified; this + prevents any future fields in + [Instance][google.spanner.admin.instance.v1.Instance] from + being erased accidentally by clients that do not know about + them. + """ + + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=1, + message='Instance', + ) + field_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""The request for + [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. + + Attributes: + name (str): + Required. The name of the instance to be deleted. Values are + of the form ``projects//instances/`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + + Attributes: + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + The instance being created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] + request was received. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is guaranteed + to succeed) and cannot be cancelled again. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation failed or + was completed successfully. + expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod): + The expected fulfillment period of this + create operation. + """ + + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=1, + message='Instance', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expected_fulfillment_period: common.FulfillmentPeriod = proto.Field( + proto.ENUM, + number=5, + enum=common.FulfillmentPeriod, + ) + + +class UpdateInstanceMetadata(proto.Message): + r"""Metadata type for the operation returned by + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. 
+
+
+class UpdateInstanceMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance].
+
+    Attributes:
+        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
+            The desired end state of the update.
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which the
+            [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]
+            request was received.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation was
+            cancelled. If set, this operation is in the
+            process of undoing itself (which is guaranteed
+            to succeed) and cannot be cancelled again.
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation failed or
+            was completed successfully.
+        expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod):
+            The expected fulfillment period of this
+            update operation.
+    """
+
+    instance: 'Instance' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='Instance',
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    end_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+    expected_fulfillment_period: common.FulfillmentPeriod = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=common.FulfillmentPeriod,
+    )
+
+
+class FreeInstanceMetadata(proto.Message):
+    r"""Free instance specific metadata that is kept even after an
+    instance has been upgraded for tracking purposes.
+
+    Attributes:
+        expire_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Timestamp after which the
+            instance will either be upgraded or scheduled
+            for deletion after a grace period.
+            ExpireBehavior is used to choose between
+            upgrading or scheduling the free instance for
+            deletion. This timestamp is set during the
+            creation of a free instance.
+        upgrade_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. If present, the timestamp at
+            which the free instance was upgraded to a
+            provisioned instance.
+        expire_behavior (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata.ExpireBehavior):
+            Specifies the expiration behavior of a free instance. The
+            default of ExpireBehavior is ``REMOVE_AFTER_GRACE_PERIOD``.
+            This can be modified during or after creation, and before
+            expiration.
+    """
+    class ExpireBehavior(proto.Enum):
+        r"""Allows users to change behavior when a free instance expires.
+
+        Values:
+            EXPIRE_BEHAVIOR_UNSPECIFIED (0):
+                Not specified.
+            FREE_TO_PROVISIONED (1):
+                When the free instance expires, upgrade the
+                instance to a provisioned instance.
+            REMOVE_AFTER_GRACE_PERIOD (2):
+                When the free instance expires, disable the
+                instance, and delete it after the grace period
+                passes if it has not been upgraded.
+        """
+        EXPIRE_BEHAVIOR_UNSPECIFIED = 0
+        FREE_TO_PROVISIONED = 1
+        REMOVE_AFTER_GRACE_PERIOD = 2
+
+    expire_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=timestamp_pb2.Timestamp,
+    )
+    upgrade_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    expire_behavior: ExpireBehavior = proto.Field(
+        proto.ENUM,
+        number=3,
+        enum=ExpireBehavior,
+    )
+
+
+class CreateInstanceConfigMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
+
+    Attributes:
+        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+            The target instance configuration end state.
+        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
+            The progress of the
+            [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]
+            operation.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation was
+            cancelled.
+    """
+
+    instance_config: 'InstanceConfig' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='InstanceConfig',
+    )
+    progress: common.OperationProgress = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.OperationProgress,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class UpdateInstanceConfigMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
+
+    Attributes:
+        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+            The desired instance configuration after
+            updating.
+        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
+            The progress of the
+            [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]
+            operation.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation was
+            cancelled.
+    """
+
+    instance_config: 'InstanceConfig' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='InstanceConfig',
+    )
+    progress: common.OperationProgress = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.OperationProgress,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class InstancePartition(proto.Message):
+    r"""An isolated set of Cloud Spanner resources that databases can
+    define placements on.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Required. A unique identifier for the instance partition.
+            Values are of the form
+            ``projects/<project>/instances/<instance>/instancePartitions/[a-z][-a-z0-9]*[a-z0-9]``.
+            The final segment of the name must be between 2 and 64
+            characters in length. An instance partition's name cannot
+            be changed after the instance partition is created.
+        config (str):
+            Required. The name of the instance partition's
+            configuration. Values are of the form
+            ``projects/<project>/instanceConfigs/<configuration>``. See
+            also
+            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+            and
+            [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+        display_name (str):
+            Required. The descriptive name for this
+            instance partition as it appears in UIs. Must be
+            unique per project and between 4 and 30
+            characters in length.
+        node_count (int):
+            The number of nodes allocated to this instance partition.
+
+            Users can set the ``node_count`` field to specify the
+            target number of nodes allocated to the instance partition.
+
+            This may be zero in API responses for instance partitions
+            that are not yet in state ``READY``.
+
+            This field is a member of `oneof`_ ``compute_capacity``.
+        processing_units (int):
+            The number of processing units allocated to this instance
+            partition.
+
+            Users can set the ``processing_units`` field to specify
+            the target number of processing units allocated to the
+            instance partition.
+
+            This might be zero in API responses for instance partitions
+            that are not yet in the ``READY`` state.
+
+            This field is a member of `oneof`_ ``compute_capacity``.
+        state (google.cloud.spanner_admin_instance_v1.types.InstancePartition.State):
+            Output only. The current instance partition
+            state.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time at which the instance
+            partition was created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time at which the instance
+            partition was most recently updated.
+        referencing_databases (MutableSequence[str]):
+            Output only. The names of the databases that
+            reference this instance partition. Referencing
+            databases should share the parent instance. The
+            existence of any referencing database prevents
+            the instance partition from being deleted.
+        referencing_backups (MutableSequence[str]):
+            Output only. Deprecated: This field is not
+            populated. Output only. The names of the backups
+            that reference this instance partition.
+            Referencing backups should share the parent
+            instance. The existence of any referencing
+            backup prevents the instance partition from
+            being deleted.
+        etag (str):
+            Used for optimistic concurrency control as a
+            way to help prevent simultaneous updates of an
+            instance partition from overwriting each other.
+            It is strongly suggested that systems make use
+            of the etag in the read-modify-write cycle to
+            perform instance partition updates in order to
+            avoid race conditions: an etag is returned in
+            the response which contains instance partitions,
+            and systems are expected to put that etag in the
+            request to update instance partitions to ensure
+            that their change will be applied to the same
+            version of the instance partition. If no etag is
+            provided in the call to update the instance
+            partition, then the existing instance partition
+            is overwritten blindly.
+    """
+    class State(proto.Enum):
+        r"""Indicates the current state of the instance partition.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Not specified.
+            CREATING (1):
+                The instance partition is still being
+                created. Resources may not be available yet, and
+                operations such as creating placements using
+                this instance partition may not work.
+            READY (2):
+                The instance partition is fully created and
+                ready to do work such as creating placements and
+                being used in databases.
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + node_count: int = proto.Field( + proto.INT32, + number=5, + oneof='compute_capacity', + ) + processing_units: int = proto.Field( + proto.INT32, + number=6, + oneof='compute_capacity', + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + referencing_databases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + referencing_backups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + etag: str = proto.Field( + proto.STRING, + number=12, + ) + + +class CreateInstancePartitionMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. + + Attributes: + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + The instance partition being created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition] + request was received. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is guaranteed + to succeed) and cannot be cancelled again. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation failed or + was completed successfully. + """ + + instance_partition: 'InstancePartition' = proto.Field( + proto.MESSAGE, + number=1, + message='InstancePartition', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class CreateInstancePartitionRequest(proto.Message): + r"""The request for + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. + + Attributes: + parent (str): + Required. The name of the instance in which to create the + instance partition. Values are of the form + ``projects//instances/``. + instance_partition_id (str): + Required. The ID of the instance partition to create. Valid + identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and + must be between 2 and 64 characters in length. + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + Required. The instance partition to create. The + instance_partition.name may be omitted, but if specified + must be + ``/instancePartitions/``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_partition_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance_partition: 'InstancePartition' = proto.Field( + proto.MESSAGE, + number=3, + message='InstancePartition', + ) + + +class DeleteInstancePartitionRequest(proto.Message): + r"""The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + + Attributes: + name (str): + Required. The name of the instance partition to be deleted. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` + etag (str): + Optional. If not empty, the API only deletes + the instance partition when the etag provided + matches the current status of the requested + instance partition. Otherwise, deletes the + instance partition without checking the current + status of the requested instance partition. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInstancePartitionRequest(proto.Message): + r"""The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + + Attributes: + name (str): + Required. The name of the requested instance partition. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateInstancePartitionRequest(proto.Message): + r"""The request for + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. + + Attributes: + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + Required. The instance partition to update, which must + always include the instance partition name. Otherwise, only + fields mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] + need be included. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + should be updated. The field mask must always be specified; + this prevents any future fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + from being erased accidentally by clients that do not know + about them. + """ + + instance_partition: 'InstancePartition' = proto.Field( + proto.MESSAGE, + number=1, + message='InstancePartition', + ) + field_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class UpdateInstancePartitionMetadata(proto.Message): + r"""Metadata type for the operation returned by + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. + + Attributes: + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + The desired end state of the update. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition] + request was received. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is guaranteed + to succeed) and cannot be cancelled again. 
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation failed or
+            was completed successfully.
+    """
+
+    instance_partition: 'InstancePartition' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='InstancePartition',
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    end_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ListInstancePartitionsRequest(proto.Message):
+    r"""The request for
+    [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
+
+    Attributes:
+        parent (str):
+            Required. The instance whose instance partitions should be
+            listed. Values are of the form
+            ``projects/<project>/instances/<instance>``. Use
+            ``{instance} = '-'`` to list instance partitions for all
+            instances in a project, e.g.,
+            ``projects/myproject/instances/-``.
+        page_size (int):
+            Number of instance partitions to be returned
+            in the response. If 0 or less, defaults to the
+            server's maximum allowed page size.
+        page_token (str):
+            If non-empty, ``page_token`` should contain a
+            [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.next_page_token]
+            from a previous
+            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
+        instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. Deadline used while retrieving metadata for
+            instance partitions. Instance partitions whose metadata
+            cannot be retrieved within this deadline will be added to
+            [unreachable][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.unreachable]
+            in
+            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
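+
+
+# NOTE: Illustrative usage only; not part of the generated module. A minimal
+# sketch of listing partitions across all instances with the ``'-'`` wildcard
+# described above (placeholder project name):
+#
+#     request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+#         parent="projects/my-project/instances/-",
+#     )
+#     for partition in client.list_instance_partitions(request=request):
+#         print(partition.name)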
+ """ + + @property + def raw_page(self): + return self + + instance_partitions: MutableSequence['InstancePartition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InstancePartition', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListInstancePartitionOperationsRequest(proto.Message): + r"""The request for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + + Attributes: + parent (str): + Required. The parent instance of the instance partition + operations. Values are of the form + ``projects//instances/``. + filter (str): + Optional. An expression that filters the list of returned + operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the Operation are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. For example, + the type string for + [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata] + is + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first, if filtering + on metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic. However, you can specify AND, OR, + and NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. + - ``(metadata.@type=`` + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata) AND`` + ``(metadata.instance_partition.name:custom-instance-partition) AND`` + ``(metadata.start_time < \"2021-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Return operations where: + + - The operation's metadata type is + [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. + - The instance partition name contains + "custom-instance-partition". + - The operation started before 2021-03-28T14:50:00Z. + - The operation resulted in an error. + page_size (int): + Optional. Number of operations to be returned + in the response. If 0 or less, defaults to the + server's maximum allowed page size. + page_token (str): + Optional. If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.next_page_token] + from a previous + [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse] + to the same ``parent`` and with the same ``filter``. + instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp): + Optional. Deadline used while retrieving metadata for + instance partition operations. 
+            Instance partitions whose operation metadata cannot be
+            retrieved within this deadline will be added to
+            [unreachable_instance_partitions][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.unreachable_instance_partitions]
+            in
+            [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse].
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ListInstancePartitionOperationsResponse(proto.Message):
+    r"""The response for
+    [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+
+    Attributes:
+        operations (MutableSequence[google.longrunning.operations_pb2.Operation]):
+            The list of matching instance partition long-running
+            operations. Each operation's name will be prefixed by the
+            instance partition's name. The operation's metadata field
+            type ``metadata.type_url`` describes the type of the
+            metadata.
+        next_page_token (str):
+            ``next_page_token`` can be sent in a subsequent
+            [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]
+            call to fetch more of the matching metadata.
+        unreachable_instance_partitions (MutableSequence[str]):
+            The list of unreachable instance partitions. It includes
+            the names of instance partitions whose operation metadata
+            could not be retrieved within
+            [instance_partition_deadline][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.instance_partition_deadline].
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=operations_pb2.Operation,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable_instance_partitions: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class MoveInstanceRequest(proto.Message):
+    r"""The request for
+    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+    Attributes:
+        name (str):
+            Required. The instance to move. Values are of the form
+            ``projects/<project>/instances/<instance>``.
+        target_config (str):
+            Required. The target instance configuration to which the
+            instance will be moved. Values are of the form
+            ``projects/<project>/instanceConfigs/<config>``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    target_config: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class MoveInstanceResponse(proto.Message):
+    r"""The response for
+    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+    """
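+
+
+# NOTE: Illustrative usage only; not part of the generated module.
+# MoveInstance is a long-running operation; a minimal sketch with placeholder
+# names (method name assumed from the generated InstanceAdmin surface):
+#
+#     request = spanner_admin_instance_v1.MoveInstanceRequest(
+#         name="projects/my-project/instances/my-instance",
+#         target_config="projects/my-project/instanceConfigs/nam3",
+#     )
+#     operation = client.move_instance(request=request)
+#     response = operation.result()  # MoveInstanceResponse when done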
+
+
+class MoveInstanceMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+    Attributes:
+        target_config (str):
+            The target instance configuration to which the instance
+            will be moved. Values are of the form
+            ``projects/<project>/instanceConfigs/<config>``.
+        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
+            The progress of the
+            [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]
+            operation.
+            [progress_percent][google.spanner.admin.instance.v1.OperationProgress.progress_percent]
+            is reset when cancellation is requested.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation was
+            cancelled.
+    """
+
+    target_config: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    progress: common.OperationProgress = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.OperationProgress,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini b/owl-bot-staging/spanner_admin_instance/v1/mypy.ini
new file mode 100644
index 0000000000..574c5aed39
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.7
+namespace_packages = True
diff --git a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py b/owl-bot-staging/spanner_admin_instance/v1/noxfile.py
new file mode 100644
index 0000000000..5918b3e5f7
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/noxfile.py
@@ -0,0 +1,601 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] + +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-spanner-admin-instance" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. 
+ """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=PREVIEW_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json new file mode 100644 index 0000000000..f58e9794e2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -0,0 +1,3450 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.spanner.admin.instance.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-spanner-admin-instance", + "version": "0.0.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_config", + "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "instance_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance_config" + }, + "description": "Sample for CreateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_create_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_config_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_config", + "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "instance_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance_config" + }, + "description": "Sample for CreateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_create_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "instance_partition_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance_partition" + }, + "description": "Sample for CreateInstancePartition", + "file": "spanner_v1_generated_instance_admin_create_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" 
+ }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "instance_partition_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance_partition" + }, + "description": "Sample for CreateInstancePartition", + "file": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for 
CreateInstance", + "file": "spanner_v1_generated_instance_admin_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "spanner_v1_generated_instance_admin_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_config" + }, + 
"description": "Sample for DeleteInstanceConfig", + "file": "spanner_v1_generated_instance_admin_delete_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_config" + }, + "description": "Sample for DeleteInstanceConfig", + "file": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_partition" + }, + "description": "Sample for DeleteInstancePartition", + "file": 
"spanner_v1_generated_instance_admin_delete_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_partition" + }, + "description": "Sample for DeleteInstancePartition", + "file": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "spanner_v1_generated_instance_admin_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"spanner_v1_generated_InstanceAdmin_DeleteInstance_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "spanner_v1_generated_instance_admin_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstance_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_instance_admin_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", + "shortName": "get_instance_config" + }, + "description": "Sample for GetInstanceConfig", + "file": "spanner_v1_generated_instance_admin_get_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", + "shortName": "get_instance_config" + }, + "description": "Sample for GetInstanceConfig", + "file": "spanner_v1_generated_instance_admin_get_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", + "shortName": "get_instance_partition" + }, + "description": "Sample for GetInstancePartition", + "file": "spanner_v1_generated_instance_admin_get_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { 
+ "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", + "shortName": "get_instance_partition" + }, + "description": "Sample for GetInstancePartition", + "file": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "spanner_v1_generated_instance_admin_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, 
+ "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "spanner_v1_generated_instance_admin_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_config_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager", + "shortName": "list_instance_config_operations" + }, + "description": "Sample for ListInstanceConfigOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_config_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager", + "shortName": "list_instance_config_operations" + }, + "description": "Sample for ListInstanceConfigOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_configs", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager", + "shortName": "list_instance_configs" + }, + "description": "Sample for ListInstanceConfigs", + "file": "spanner_v1_generated_instance_admin_list_instance_configs_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_configs", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager", + "shortName": "list_instance_configs" + }, + "description": "Sample for ListInstanceConfigs", + "file": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partition_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitionOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager", + "shortName": 
"list_instance_partition_operations" + }, + "description": "Sample for ListInstancePartitionOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partition_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitionOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager", + "shortName": "list_instance_partition_operations" + }, + "description": "Sample for ListInstancePartitionOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partitions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager", + "shortName": "list_instance_partitions" + }, + "description": "Sample for ListInstancePartitions", + "file": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partitions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager", + "shortName": "list_instance_partitions" + }, + "description": "Sample for ListInstancePartitions", + "file": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instances", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "spanner_v1_generated_instance_admin_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instances", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "spanner_v1_generated_instance_admin_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.move_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "spanner_v1_generated_instance_admin_move_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_move_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.move_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "spanner_v1_generated_instance_admin_move_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_move_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_instance_admin_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", 
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" + }, + { + "name": "instance_config", + "type": 
"google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance_config" + }, + "description": "Sample for UpdateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_update_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" + }, + { + "name": "instance_config", + "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance_config" + }, + "description": "Sample for UpdateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_update_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_partition", + "method": { + "fullName": 
"google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance_partition" + }, + "description": "Sample for UpdateInstancePartition", + "file": "spanner_v1_generated_instance_admin_update_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance_partition" + }, + "description": "Sample for UpdateInstancePartition", + "file": "spanner_v1_generated_instance_admin_update_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"spanner_v1_generated_instance_admin_update_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "spanner_v1_generated_instance_admin_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "spanner_v1_generated_instance_admin_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_sync.py" + } + ] +} diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py new file mode 100644 index 0000000000..74bd640044 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_create_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = spanner_admin_instance_v1.Instance()
+    instance.name = "name_value"
+    instance.config = "config_value"
+    instance.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.CreateInstanceRequest(
+        parent="parent_value",
+        instance_id="instance_id_value",
+        instance=instance,
+    )
+
+    # Make the request
+    operation = client.create_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_CreateInstance_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py
new file mode 100644
index 0000000000..c3f266e4c4
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance


+# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_create_instance_config():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.CreateInstanceConfigRequest(
+        parent="parent_value",
+        instance_config_id="instance_config_id_value",
+    )
+
+    # Make the request
+    operation = client.create_instance_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py
new file mode 100644
index 0000000000..c5b7616534
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance


+# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_create_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.CreateInstanceConfigRequest( + parent="parent_value", + instance_config_id="instance_config_id_value", + ) + + # Make the request + operation = client.create_instance_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py new file mode 100644 index 0000000000..a22765f53f --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_create_instance_partition():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance_partition = spanner_admin_instance_v1.InstancePartition()
+    instance_partition.node_count = 1070
+    instance_partition.name = "name_value"
+    instance_partition.config = "config_value"
+    instance_partition.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.CreateInstancePartitionRequest(
+        parent="parent_value",
+        instance_partition_id="instance_partition_id_value",
+        instance_partition=instance_partition,
+    )
+
+    # Make the request
+    operation = client.create_instance_partition(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py
new file mode 100644
index 0000000000..5b5f2e0e26
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInstancePartition
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance


+# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_create_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance_partition = spanner_admin_instance_v1.InstancePartition() + instance_partition.node_count = 1070 + instance_partition.name = "name_value" + instance_partition.config = "config_value" + instance_partition.display_name = "display_name_value" + + request = spanner_admin_instance_v1.CreateInstancePartitionRequest( + parent="parent_value", + instance_partition_id="instance_partition_id_value", + instance_partition=instance_partition, + ) + + # Make the request + operation = client.create_instance_partition(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py new file mode 100644 index 0000000000..f43c5016b5 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_create_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance = spanner_admin_instance_v1.Instance() + instance.name = "name_value" + instance.config = "config_value" + instance.display_name = "display_name_value" + + request = spanner_admin_instance_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_CreateInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py new file mode 100644 index 0000000000..262da709aa --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_delete_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py new file mode 100644 index 0000000000..df83d9e424 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_delete_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_config(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py new file mode 100644 index 0000000000..9a9c4d7ca1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_delete_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( + name="name_value", + ) + + # Make the request + client.delete_instance_config(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py new file mode 100644 index 0000000000..78ca44d6c2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_partition(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py new file mode 100644 index 0000000000..72249ef6c7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_instance_partition(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py new file mode 100644 index 0000000000..613ac6c070 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_delete_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + client.delete_instance(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py new file mode 100644 index 0000000000..a0b620ae4f --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py new file mode 100644 index 0000000000..cc0d725a03 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py new file mode 100644 index 0000000000..059eb2a078 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_get_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py new file mode 100644 index 0000000000..9adfb51c2e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_get_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_config(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py new file mode 100644 index 0000000000..16e9d3c3c8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_get_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_config(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py new file mode 100644 index 0000000000..8e84abcf6e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_get_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstancePartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_partition(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py new file mode 100644 index 0000000000..d617cbb382 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_get_instance_partition():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.GetInstancePartitionRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_instance_partition(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py
new file mode 100644
index 0000000000..4a246a5bf3
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_GetInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
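+# - "name_value" below is a placeholder; instance names take the form
+#   projects/{project}/instances/{instance}.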
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_get_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.GetInstanceRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_instance(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_GetInstance_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py
new file mode 100644
index 0000000000..a0580fef7c
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstanceConfigOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
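+# - "parent_value" below is a placeholder; the parent takes the form
+#   projects/{project}, and the items returned are metadata for long-running
+#   instance config operations, not the configs themselves.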
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_config_operations():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_instance_config_operations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py
new file mode 100644
index 0000000000..89213b3a2e
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstanceConfigOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_list_instance_config_operations():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_instance_config_operations(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py
new file mode 100644
index 0000000000..651b2f88ae
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstanceConfigs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
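+# - The call returns a pager that fetches further results lazily; iterating
+#   "async for page in page_result.pages" consumes whole pages instead of
+#   individual items.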
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_configs():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_instance_configs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py
new file mode 100644
index 0000000000..a0f120277a
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstanceConfigs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_list_instance_configs():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_instance_configs(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py
new file mode 100644
index 0000000000..9dedb973f1
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstancePartitionOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
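+# - "parent_value" below is a placeholder; for this method the parent is an
+#   instance, in the form projects/{project}/instances/{instance}.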
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_partition_operations():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_instance_partition_operations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py
new file mode 100644
index 0000000000..b2a7549b29
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstancePartitionOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_list_instance_partition_operations():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_instance_partition_operations(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py
new file mode 100644
index 0000000000..56adc152fe
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstancePartitions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
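+# - Optional request fields such as page_size and instance_partition_deadline
+#   can be set on the request in addition to parent.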
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_partitions():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_instance_partitions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py
new file mode 100644
index 0000000000..1e65552fc1
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstancePartitions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_list_instance_partitions():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_instance_partitions(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py
new file mode 100644
index 0000000000..abe1a1affa
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstances
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstances_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
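+# - An optional filter field can also be set on the request to narrow the
+#   returned instances (for example by name or labels).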
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instances():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_instances(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstances_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py
new file mode 100644
index 0000000000..f344baff11
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstances
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstances_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_list_instances():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_instances(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstances_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py
new file mode 100644
index 0000000000..ce62120492
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for MoveInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_MoveInstance_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
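+# - "target_config_value" is a placeholder; the target config takes the form
+#   projects/{project}/instanceConfigs/{config}, and the move is returned as
+#   a long-running operation.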
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_move_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.MoveInstanceRequest(
+        name="name_value",
+        target_config="target_config_value",
+    )
+
+    # Make the request
+    operation = client.move_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_MoveInstance_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py
new file mode 100644
index 0000000000..4621200e0c
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for MoveInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_MoveInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_move_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.MoveInstanceRequest(
+        name="name_value",
+        target_config="target_config_value",
+    )
+
+    # Make the request
+    operation = client.move_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_MoveInstance_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py
new file mode 100644
index 0000000000..2443f2127d
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SetIamPolicy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
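+# - The request below omits the policy field; a real call attaches the policy
+#   to set, along the lines of (illustrative values only):
+#     from google.iam.v1 import policy_pb2
+#     policy = policy_pb2.Policy(bindings=[policy_pb2.Binding(
+#         role="roles/spanner.admin", members=["user:admin@example.com"])])
+#     request = iam_policy_pb2.SetIamPolicyRequest(
+#         resource="resource_value", policy=policy)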
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+
+async def sample_set_iam_policy():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = iam_policy_pb2.SetIamPolicyRequest(
+        resource="resource_value",
+    )
+
+    # Make the request
+    response = await client.set_iam_policy(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py
new file mode 100644
index 0000000000..ba6401602f
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SetIamPolicy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+
+def sample_set_iam_policy():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = iam_policy_pb2.SetIamPolicyRequest(
+        resource="resource_value",
+    )
+
+    # Make the request
+    response = client.set_iam_policy(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py
new file mode 100644
index 0000000000..aa0e05dde3
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for TestIamPermissions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
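+# - The permissions values below are placeholders; real entries are IAM
+#   permission strings such as "spanner.instances.get".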
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+
+async def sample_test_iam_permissions():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = iam_policy_pb2.TestIamPermissionsRequest(
+        resource="resource_value",
+        permissions=['permissions_value1', 'permissions_value2'],
+    )
+
+    # Make the request
+    response = await client.test_iam_permissions(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py
new file mode 100644
index 0000000000..80b2a4dd21
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for TestIamPermissions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+
+def sample_test_iam_permissions():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = iam_policy_pb2.TestIamPermissionsRequest(
+        resource="resource_value",
+        permissions=['permissions_value1', 'permissions_value2'],
+    )
+
+    # Make the request
+    response = client.test_iam_permissions(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py
new file mode 100644
index 0000000000..ecabbf5191
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
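+# - A field_mask restricts which Instance fields are overwritten; for example
+#   (illustrative only):
+#     from google.protobuf import field_mask_pb2
+#     request = spanner_admin_instance_v1.UpdateInstanceRequest(
+#         instance=instance,
+#         field_mask=field_mask_pb2.FieldMask(paths=["display_name"]))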
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_update_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = spanner_admin_instance_v1.Instance()
+    instance.name = "name_value"
+    instance.config = "config_value"
+    instance.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.UpdateInstanceRequest(
+        instance=instance,
+    )
+
+    # Make the request
+    operation = client.update_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py
new file mode 100644
index 0000000000..f7ea78401c
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
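+# - A working call populates instance_config (its name plus the changed
+#   fields) together with an update_mask naming those fields; the bare
+#   request shown below is only a starting point.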
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_update_instance_config():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
+    )
+
+    # Make the request
+    operation = client.update_instance_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py
new file mode 100644
index 0000000000..1d184f6c58
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_update_instance_config():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
+    )
+
+    # Make the request
+    operation = client.update_instance_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py
new file mode 100644
index 0000000000..42d3c484f8
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstancePartition
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
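+# - node_count and processing_units are alternative ways to size a partition;
+#   set one or the other, not both.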
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_update_instance_partition():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance_partition = spanner_admin_instance_v1.InstancePartition()
+    instance_partition.node_count = 1070
+    instance_partition.name = "name_value"
+    instance_partition.config = "config_value"
+    instance_partition.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
+        instance_partition=instance_partition,
+    )
+
+    # Make the request
+    operation = client.update_instance_partition(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py
new file mode 100644
index 0000000000..56cd2760a1
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstancePartition
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+def sample_update_instance_partition():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminClient()
+
+    # Initialize request argument(s)
+    instance_partition = spanner_admin_instance_v1.InstancePartition()
+    instance_partition.node_count = 1070
+    instance_partition.name = "name_value"
+    instance_partition.config = "config_value"
+    instance_partition.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
+        instance_partition=instance_partition,
+    )
+
+    # Make the request
+    operation = client.update_instance_partition(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py
new file mode 100644
index 0000000000..2340e701e1
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_update_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance = spanner_admin_instance_v1.Instance() + instance.name = "name_value" + instance.config = "config_value" + instance.display_name = "display_name_value" + + request = spanner_admin_instance_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py b/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py new file mode 100644 index 0000000000..8200af5099 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py @@ -0,0 +1,196 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class spanner_admin_instanceCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'create_instance_config': ('parent', 'instance_config_id', 'instance_config', 'validate_only', ), + 'create_instance_partition': ('parent', 'instance_partition_id', 'instance_partition', ), + 'delete_instance': ('name', ), + 'delete_instance_config': ('name', 'etag', 'validate_only', ), + 'delete_instance_partition': ('name', 'etag', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_instance': ('name', 'field_mask', ), + 'get_instance_config': ('name', ), + 'get_instance_partition': ('name', ), + 'list_instance_config_operations': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_instance_configs': ('parent', 'page_size', 'page_token', ), + 'list_instance_partition_operations': ('parent', 'filter', 'page_size', 'page_token', 'instance_partition_deadline', ), + 'list_instance_partitions': ('parent', 'page_size', 'page_token', 'instance_partition_deadline', ), + 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'instance_deadline', ), + 'move_instance': ('name', 'target_config', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_instance': ('instance', 'field_mask', ), + 'update_instance_config': ('instance_config', 'update_mask', 'validate_only', ), + 'update_instance_partition': ('instance_partition', 'field_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
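+                # Illustrative only (hypothetical call, not generated output):
+                # given the METHOD_TO_PARAMS mapping above, a flattened call like
+                #     client.get_instance('projects/p/instances/i')
+                # would be rewritten to
+                #     client.get_instance(request={'name': 'projects/p/instances/i'})
+                # while any retry/timeout/metadata arguments stay outside the
+                # request dict as ordinary keyword arguments.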
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=spanner_admin_instanceCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the spanner_admin_instance client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_instance/v1/setup.py b/owl-bot-staging/spanner_admin_instance/v1/setup.py new file mode 100644 index 0000000000..8d7bb912c2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/setup.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-spanner-admin-instance' + + +description = "Google Cloud Spanner Admin Instance API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/spanner_admin_instance/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-instance" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
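+# (Note: nothing is pinned below, so pip is free to resolve each entry to the
+# newest release that setup.py's version ranges allow; this is an assumption
+# about how these unpinned constraints files are used.)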
+google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt new file mode 100644 index 0000000000..2ae5a677e8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt new file mode 100644 index 0000000000..2ae5a677e8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000000..5b1ee6c35a --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt @@ -0,0 +1,13 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files +proto-plus==1.22.3 +protobuf==3.20.2 +grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py new file mode 100644 index 0000000000..82c9940cf7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -0,0 +1,17636 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import re +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminAsyncClient +from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminClient +from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers +from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports +from google.cloud.spanner_admin_instance_v1.types import common +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InstanceAdminClient._get_default_mtls_endpoint(None) is None + assert InstanceAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert InstanceAdminClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + InstanceAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert InstanceAdminClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert InstanceAdminClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert InstanceAdminClient._get_client_cert_source(None, False) is None + assert 
InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert InstanceAdminClient._get_client_cert_source(None, True) is mock_default_cert_source + assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = InstanceAdminClient._DEFAULT_UNIVERSE + default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert InstanceAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + assert InstanceAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert InstanceAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert InstanceAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert InstanceAdminClient._get_universe_domain(None, None) == InstanceAdminClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + InstanceAdminClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
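+
+# A minimal sketch (hypothetical values mirroring the assertions above) of the
+# precedence that test__get_universe_domain verifies: an explicit client option
+# wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value, which in turn
+# wins over the built-in default:
+#
+#   InstanceAdminClient._get_universe_domain("foo.com", "bar.com")  # -> "foo.com"
+#   InstanceAdminClient._get_universe_domain(None, "bar.com")       # -> "bar.com"
+#   InstanceAdminClient._get_universe_domain(None, None)            # -> _DEFAULT_UNIVERSE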
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstanceAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstanceAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceAdminClient, "grpc"), + (InstanceAdminAsyncClient, "grpc_asyncio"), + (InstanceAdminClient, "rest"), +]) +def test_instance_admin_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.InstanceAdminGrpcTransport, "grpc"), + (transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.InstanceAdminRestTransport, "rest"), +]) +def test_instance_admin_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceAdminClient, "grpc"), + (InstanceAdminAsyncClient, "grpc_asyncio"), + (InstanceAdminClient, "rest"), +]) +def test_instance_admin_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +def test_instance_admin_client_get_transport_class(): + transport = InstanceAdminClient.get_transport_class() + available_transports = [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminRestTransport, + ] + assert transport in available_transports + + transport = InstanceAdminClient.get_transport_class("grpc") + assert transport == transports.InstanceAdminGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"), +]) +@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) +def test_instance_admin_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
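+    # (With "always", the mTLS endpoint is expected to be selected even though
+    # no client certificate source is configured in this case.)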
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "true"), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "false"), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "true"), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "false"), +]) +@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) 
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instance_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
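+    # (With no certificate source available from either place, the client is
+    # expected to fall back to the regular, non-mTLS endpoint.)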
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InstanceAdminClient, InstanceAdminAsyncClient +]) +@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminAsyncClient)) +def test_instance_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + InstanceAdminClient, InstanceAdminAsyncClient +]) +@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) +def test_instance_admin_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = InstanceAdminClient._DEFAULT_UNIVERSE + default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"), +]) +def test_instance_admin_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", None), +]) +def test_instance_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
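+    # ("credentials.json" below is a placeholder path; the transport __init__
+    # is mocked, so no file is actually read.)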
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_instance_admin_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = InstanceAdminClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_instance_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "spanner.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.admin',
+            ),
+            scopes=None,
+            default_host="spanner.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.ListInstanceConfigsRequest,
+    dict,
+])
+def test_list_instance_configs(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstanceConfigsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstanceConfigsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_instance_configs_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner_instance_admin.ListInstanceConfigsRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
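+        # The .name assignment above is inert for Spanner responses; it exists
+        # because this generated template is shared with clients (e.g. compute)
+        # whose wrapped methods read an operation name off the response.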
+ client.list_instance_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_instance_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc + request = {} + client.list_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instance_configs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instance_configs] = mock_rpc + + request = {} + await client.list_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_configs_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigsRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + # Designate an appropriate return value for the call. 
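+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response in an
+        # awaitable call object, mimicking a grpc.aio unary-unary RPC.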
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstanceConfigsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstanceConfigsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_async_from_dict():
+    await test_list_instance_configs_async(request_type=dict)
+
+def test_list_instance_configs_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstanceConfigsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
+        client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstanceConfigsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
+        await client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_instance_configs_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_instance_configs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
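+        # The flattened keyword arguments should have been merged into a
+        # single request message before the transport stub was invoked.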
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_instance_configs_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_configs(
+            spanner_instance_admin.ListInstanceConfigsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instance_configs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instance_configs(
+            spanner_instance_admin.ListInstanceConfigsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_configs_pager(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Set the response to a series of pages.
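+        # Each side_effect element is consumed by one page fetch; the trailing
+        # RuntimeError guards against the pager requesting a page beyond the
+        # final response (the one with an empty next_page_token).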
+        call.side_effect = (
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_instance_configs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.InstanceConfig)
+                   for i in results)
+
+
+def test_list_instance_configs_pages(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instance_configs(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_async_pager():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_configs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.InstanceConfig) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_configs_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceConfigRequest, + dict, +]) +def test_get_instance_config(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, + base_config='base_config_value', + etag='etag_value', + leader_options=['leader_options_value'], + reconciling=True, + state=spanner_instance_admin.InstanceConfig.State.CREATING, + free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, + quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, + storage_limit_per_processing_unit=3540, + ) + response = client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.GetInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_instance_admin.InstanceConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED + assert response.base_config == 'base_config_value' + assert response.etag == 'etag_value' + assert response.leader_options == ['leader_options_value'] + assert response.reconciling is True + assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING + assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE + assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION + assert response.storage_limit_per_processing_unit == 3540 + + +def test_get_instance_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.GetInstanceConfigRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_instance_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceConfigRequest( + name='name_value', + ) + +def test_get_instance_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc + request = {} + client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_instance_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_instance_config] = mock_rpc + + request = {} + await client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig(
+            name='name_value',
+            display_name='display_name_value',
+            config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED,
+            base_config='base_config_value',
+            etag='etag_value',
+            leader_options=['leader_options_value'],
+            reconciling=True,
+            state=spanner_instance_admin.InstanceConfig.State.CREATING,
+            free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE,
+            quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION,
+            storage_limit_per_processing_unit=3540,
+        ))
+        response = await client.get_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.GetInstanceConfigRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_instance_admin.InstanceConfig)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED
+    assert response.base_config == 'base_config_value'
+    assert response.etag == 'etag_value'
+    assert response.leader_options == ['leader_options_value']
+    assert response.reconciling is True
+    assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING
+    assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE
+    assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION
+    assert response.storage_limit_per_processing_unit == 3540
+
+
+@pytest.mark.asyncio
+async def test_get_instance_config_async_from_dict():
+    await test_get_instance_config_async(request_type=dict)
+
+def test_get_instance_config_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.GetInstanceConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.InstanceConfig()
+        client.get_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_instance_config_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.GetInstanceConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig())
+        await client.get_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_instance_config_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.InstanceConfig()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_instance_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_instance_config_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_instance_config(
+            spanner_instance_admin.GetInstanceConfigRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_instance_config_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_instance_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_instance_config_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_instance_config( + spanner_instance_admin.GetInstanceConfigRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.CreateInstanceConfigRequest, + dict, +]) +def test_create_instance_config(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.CreateInstanceConfigRequest( + parent='parent_value', + instance_config_id='instance_config_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_instance_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstanceConfigRequest( + parent='parent_value', + instance_config_id='instance_config_id_value', + ) + +def test_create_instance_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc + request = {} + client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance_config] = mock_rpc + + request = {} + await client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_config_async_from_dict(): + await test_create_instance_config_async(request_type=dict) + +def test_create_instance_config_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
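+    # The routing header travels as the x-goog-request-params entry in the
+    # gRPC metadata, which the assertions below check.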
+ request = spanner_instance_admin.CreateInstanceConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_config_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_instance_config_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance_config( + parent='parent_value', + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + instance_config_id='instance_config_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_config + mock_val = spanner_instance_admin.InstanceConfig(name='name_value') + assert arg == mock_val + arg = args[0].instance_config_id + mock_val = 'instance_config_id_value' + assert arg == mock_val + + +def test_create_instance_config_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.create_instance_config(
+            spanner_instance_admin.CreateInstanceConfigRequest(),
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_instance_config_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_instance_config(
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].instance_config
+        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].instance_config_id
+        mock_val = 'instance_config_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_instance_config_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_instance_config(
+            spanner_instance_admin.CreateInstanceConfigRequest(),
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstanceConfigRequest,
+    dict,
+])
+def test_update_instance_config(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.UpdateInstanceConfigRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
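+    # Long-running methods return an operation object implementing the
+    # google.api_core future.Future interface rather than the raw
+    # operations_pb2.Operation proto.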
+ assert isinstance(response, future.Future) + + +def test_update_instance_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.UpdateInstanceConfigRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_instance_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstanceConfigRequest( + ) + +def test_update_instance_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc + request = {} + client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_instance_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_instance_config] = mock_rpc + + request = {} + await client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_config_async_from_dict(): + await test_update_instance_config_async(request_type=dict) + +def test_update_instance_config_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceConfigRequest() + + request.instance_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance_config.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_config_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceConfigRequest() + + request.instance_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'instance_config.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_instance_config_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_instance_config(
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_config
+        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_instance_config_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_instance_config(
+            spanner_instance_admin.UpdateInstanceConfigRequest(),
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_instance_config_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_instance_config(
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_config
+        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_instance_config_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_instance_config( + spanner_instance_admin.UpdateInstanceConfigRequest(), + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.DeleteInstanceConfigRequest, + dict, +]) +def test_delete_instance_config(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_instance_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.DeleteInstanceConfigRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_instance_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstanceConfigRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_instance_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc + request = {} + client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance_config] = mock_rpc + + request = {} + await client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_instance_config_async_from_dict(): + await test_delete_instance_config_async(request_type=dict) + +def test_delete_instance_config_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + call.return_value = None + client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
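+ # mock_calls entries unpack as (name, args, kwargs); the routing header
+ # is passed through the `metadata` keyword argument as (key, value) pairs.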
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_config_field_headers_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner_instance_admin.DeleteInstanceConfigRequest()
+
+ request.name = 'name_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_instance_config),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_instance_config(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name_value',
+ ) in kw['metadata']
+
+
+def test_delete_instance_config_flattened():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_instance_config),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_instance_config(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_instance_config_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_instance_config(
+ spanner_instance_admin.DeleteInstanceConfigRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_instance_config_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_instance_config),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_instance_config(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_instance_config_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
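+ # The coroutine must be awaited inside the raises block: the ValueError
+ # surfaces when the call executes, not when the coroutine is created.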
+ with pytest.raises(ValueError): + await client.delete_instance_config( + spanner_instance_admin.DeleteInstanceConfigRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstanceConfigOperationsRequest, + dict, +]) +def test_list_instance_config_operations(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_instance_config_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstanceConfigOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstanceConfigOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_instance_config_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstanceConfigOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_instance_config_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstanceConfigOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_instance_config_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_config_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
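+ # Overwrite the cached entry so the call below hits the plain Mock and
+ # call counts can be observed without the retry/timeout wrapping in the way.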
+ client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc
+ request = {}
+ client.list_instance_config_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_instance_config_operations(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_instance_config_operations in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_instance_config_operations] = mock_rpc
+
+ request = {}
+ await client.list_instance_config_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_instance_config_operations(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest):
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_config_operations),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_instance_config_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListInstanceConfigOperationsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_async_from_dict():
+ await test_list_instance_config_operations_async(request_type=dict)
+
+def test_list_instance_config_operations_field_headers():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_config_operations),
+ '__call__') as call:
+ call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
+ client.list_instance_config_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_field_headers_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_config_operations),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse())
+ await client.list_instance_config_operations(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_instance_config_operations_flattened():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_config_operations),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_instance_config_operations(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_instance_config_operations_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_instance_config_operations(
+ spanner_instance_admin.ListInstanceConfigOperationsRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
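+ # (Patching __call__ on the multicallable's type intercepts the stub
+ # invocation itself, which works for both sync and async transports.)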
+ with mock.patch.object(
+ type(client.transport.list_instance_config_operations),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_instance_config_operations(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_instance_config_operations(
+ spanner_instance_admin.ListInstanceConfigOperationsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_instance_config_operations_pager(transport_name: str = "grpc"):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_config_operations),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ spanner_instance_admin.ListInstanceConfigOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner_instance_admin.ListInstanceConfigOperationsResponse(
+ operations=[],
+ next_page_token='def',
+ ),
+ spanner_instance_admin.ListInstanceConfigOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner_instance_admin.ListInstanceConfigOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_instance_config_operations(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation)
+ for i in results)
+def test_list_instance_config_operations_pages(transport_name: str = "grpc"):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_config_operations),
+ '__call__') as call:
+ # Set the response to a series of pages.
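+ # side_effect yields one response per successive call; the trailing
+ # RuntimeError fires only if the pager requests more pages than staged.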
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instance_config_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instance_config_operations_async_pager(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_config_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_config_operations_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_config_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancesRequest, + dict, +]) +def test_list_instances(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instances_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
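+ # Sending a pre-populated request verifies it reaches the transport
+ # unchanged; this request type appears to have no auto-populated UUID4
+ # fields, so it should arrive exactly as constructed.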
+ client.list_instances(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_instances_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instances in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc + + request = {} + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancesRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. 
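+ # FakeUnaryUnaryCall wraps the response so the async client can await it
+ # like a real grpc.aio call.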
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse(
+ next_page_token='next_page_token_value',
+ unreachable=['unreachable_value'],
+ ))
+ response = await client.list_instances(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.ListInstancesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListInstancesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_instances_async_from_dict():
+ await test_list_instances_async(request_type=dict)
+
+def test_list_instances_field_headers():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner_instance_admin.ListInstancesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instances),
+ '__call__') as call:
+ call.return_value = spanner_instance_admin.ListInstancesResponse()
+ client.list_instances(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instances_field_headers_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner_instance_admin.ListInstancesRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instances),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
+ await client.list_instances(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_list_instances_flattened():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instances),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner_instance_admin.ListInstancesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_instances(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
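+ # The flattened kwargs are merged into a single request message, so the
+ # individual fields are read back off args[0] below.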
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+
+def test_list_instances_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_instances(
+ spanner_instance_admin.ListInstancesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instances),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_instances(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_instances(
+ spanner_instance_admin.ListInstancesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_instances_pager(transport_name: str = "grpc"):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instances),
+ '__call__') as call:
+ # Set the response to a series of pages.
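+ # Besides iterating the pages, the pager test checks that per-call retry,
+ # timeout, and routing metadata are stored on the pager for later fetches.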
+ call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, spanner_instance_admin.Instance) + for i in results) +def test_list_instances_pages(transport_name: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.Instance) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancePartitionsRequest, + dict, +]) +def test_list_instance_partitions(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
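+ # The request is compared against a freshly constructed default message,
+ # since an empty request_type() was sent above.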
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstancePartitionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancePartitionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instance_partitions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstancePartitionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_instance_partitions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancePartitionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_instance_partitions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_partitions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc + request = {} + client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.list_instance_partitions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.list_instance_partitions in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.list_instance_partitions] = mock_rpc
+
+ request = {}
+ await client.list_instance_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_instance_partitions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionsRequest):
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_partitions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse(
+ next_page_token='next_page_token_value',
+ unreachable=['unreachable_value'],
+ ))
+ response = await client.list_instance_partitions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.ListInstancePartitionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListInstancePartitionsAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+ assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_async_from_dict():
+ await test_list_instance_partitions_async(request_type=dict)
+
+def test_list_instance_partitions_field_headers():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner_instance_admin.ListInstancePartitionsRequest()
+
+ request.parent = 'parent_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
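+ # The `parent` set on the request above is what should appear in the
+ # x-goog-request-params header asserted at the end of this test.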
+ with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_instance_partitions_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.ListInstancePartitionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse()) + await client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_instance_partitions_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instance_partitions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_instance_partitions_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instance_partitions( + spanner_instance_admin.ListInstancePartitionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_instance_partitions_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_instance_partitions(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_instance_partitions(
+ spanner_instance_admin.ListInstancePartitionsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_instance_partitions_pager(transport_name: str = "grpc"):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_partitions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ spanner_instance_admin.ListInstancePartitionsResponse(
+ instance_partitions=[
+ spanner_instance_admin.InstancePartition(),
+ spanner_instance_admin.InstancePartition(),
+ spanner_instance_admin.InstancePartition(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner_instance_admin.ListInstancePartitionsResponse(
+ instance_partitions=[],
+ next_page_token='def',
+ ),
+ spanner_instance_admin.ListInstancePartitionsResponse(
+ instance_partitions=[
+ spanner_instance_admin.InstancePartition(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner_instance_admin.ListInstancePartitionsResponse(
+ instance_partitions=[
+ spanner_instance_admin.InstancePartition(),
+ spanner_instance_admin.InstancePartition(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_instance_partitions(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, spanner_instance_admin.InstancePartition)
+ for i in results)
+def test_list_instance_partitions_pages(transport_name: str = "grpc"):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_instance_partitions),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instance_partitions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instance_partitions_async_pager(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_partitions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.InstancePartition) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_partitions_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_partitions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceRequest, + dict, +]) +def test_get_instance(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, + endpoint_uris=['endpoint_uris_value'], + edition=spanner_instance_admin.Instance.Edition.STANDARD, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED + assert response.endpoint_uris == ['endpoint_uris_value'] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD + assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE + + +def test_get_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.GetInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceRequest( + name='name_value', + ) + +def test_get_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_instance(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_instance in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc
+
+ request = {}
+ await client.get_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_instance(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceRequest):
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_instance),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance(
+ name='name_value',
+ config='config_value',
+ display_name='display_name_value',
+ node_count=1070,
+ processing_units=1743,
+ state=spanner_instance_admin.Instance.State.CREATING,
+ instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED,
+ endpoint_uris=['endpoint_uris_value'],
+ edition=spanner_instance_admin.Instance.Edition.STANDARD,
+ default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE,
+ ))
+ response = await client.get_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.GetInstanceRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
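+ # proto-plus messages expose scalar, enum, and repeated fields as native
+ # Python attributes, so the field values compare directly below.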
+ assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED + assert response.endpoint_uris == ['endpoint_uris_value'] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD + assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + +def test_get_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = spanner_instance_admin.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_instance_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_instance(
+ spanner_instance_admin.GetInstanceRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_instance_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_instance),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_instance(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_instance_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_instance(
+ spanner_instance_admin.GetInstanceRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.CreateInstanceRequest,
+ dict,
+])
+def test_create_instance(request_type, transport: str = 'grpc'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_instance),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.create_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.CreateInstanceRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_create_instance_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner_instance_admin.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + +def test_create_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc + + request = {} + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + +def test_create_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
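+ # The GAPIC layer folds routing parameters into the request metadata as
+ # a single x-goog-request-params header, which is what is checked here.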
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_instance_flattened():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_instance),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_instance(
+ parent='parent_value',
+ instance_id='instance_id_value',
+ instance=spanner_instance_admin.Instance(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].instance_id
+ mock_val = 'instance_id_value'
+ assert arg == mock_val
+ arg = args[0].instance
+ mock_val = spanner_instance_admin.Instance(name='name_value')
+ assert arg == mock_val
+
+
+def test_create_instance_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_instance(
+ spanner_instance_admin.CreateInstanceRequest(),
+ parent='parent_value',
+ instance_id='instance_id_value',
+ instance=spanner_instance_admin.Instance(name='name_value'),
+ )
+
+@pytest.mark.asyncio
+async def test_create_instance_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_instance),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_instance(
+ parent='parent_value',
+ instance_id='instance_id_value',
+ instance=spanner_instance_admin.Instance(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].instance_id
+ mock_val = 'instance_id_value'
+ assert arg == mock_val
+ arg = args[0].instance
+ mock_val = spanner_instance_admin.Instance(name='name_value')
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_instance_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.create_instance( + spanner_instance_admin.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.UpdateInstanceRequest, + dict, +]) +def test_update_instance(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.UpdateInstanceRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstanceRequest( + ) + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc + + request = {} + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + +def test_update_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceRequest() + + request.instance.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
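+ # The transport attribute is a gRPC callable object, so the test patches
+ # __call__ on its type to intercept the underlying invocation.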
+ with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceRequest() + + request.instance.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=name_value', + ) in kw['metadata'] + + +def test_update_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = spanner_instance_admin.Instance(name='name_value') + assert arg == mock_val + arg = args[0].field_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + spanner_instance_admin.UpdateInstanceRequest(), + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
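+ # The async client awaits the stub call, so the mocked return value must
+ # be awaitable; FakeUnaryUnaryCall wraps the response message for that.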
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_instance(
+ instance=spanner_instance_admin.Instance(name='name_value'),
+ field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].instance
+ mock_val = spanner_instance_admin.Instance(name='name_value')
+ assert arg == mock_val
+ arg = args[0].field_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_instance(
+ spanner_instance_admin.UpdateInstanceRequest(),
+ instance=spanner_instance_admin.Instance(name='name_value'),
+ field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.DeleteInstanceRequest,
+ dict,
+])
+def test_delete_instance(request_type, transport: str = 'grpc'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_instance),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.DeleteInstanceRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_instance_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner_instance_admin.DeleteInstanceRequest(
+ name='name_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_instance),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstanceRequest( + name='name_value', + ) + +def test_delete_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc + + request = {} + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + +def test_delete_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = None + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.delete_instance(
+ spanner_instance_admin.DeleteInstanceRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_instance),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_instance(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_instance(
+ spanner_instance_admin.DeleteInstanceRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ iam_policy_pb2.SetIamPolicyRequest,
+ dict,
+])
+def test_set_iam_policy(request_type, transport: str = 'grpc'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy(
+ version=774,
+ etag=b'etag_blob',
+ )
+ response = client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+ assert response.version == 774
+ assert response.etag == b'etag_blob'
+
+
+def test_set_iam_policy_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = iam_policy_pb2.SetIamPolicyRequest(
+ resource='resource_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_iam_policy),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
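+ # Invoke with the populated request, then verify the stub received it
+ # unchanged.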
+ client.set_iam_policy(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
+ resource='resource_value',
+ )
+
+def test_set_iam_policy_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.set_iam_policy in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
+ request = {}
+ client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.set_iam_policy(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
+
+ request = {}
+ await client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.set_iam_policy(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+ version=774,
+ etag=b'etag_blob',
+ ))
+ response = await client.set_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + +def test_set_iam_policy_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.set_iam_policy(
+ resource='resource_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].resource
+ mock_val = 'resource_value'
+ assert arg == mock_val
+
+
+def test_set_iam_policy_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.set_iam_policy(
+ iam_policy_pb2.SetIamPolicyRequest(),
+ resource='resource_value',
+ )
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.set_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.set_iam_policy(
+ resource='resource_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].resource
+ mock_val = 'resource_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.set_iam_policy(
+ iam_policy_pb2.SetIamPolicyRequest(),
+ resource='resource_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ iam_policy_pb2.GetIamPolicyRequest,
+ dict,
+])
+def test_get_iam_policy(request_type, transport: str = 'grpc'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy(
+ version=774,
+ etag=b'etag_blob',
+ )
+ response = client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+ assert response.version == 774
+ assert response.etag == b'etag_blob'
+
+
+def test_get_iam_policy_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + +def test_get_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc + + request = {} + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.get_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+    await test_get_iam_policy_async(request_type=dict)
+
+def test_get_iam_policy_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = policy_pb2.Policy()
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_get_iam_policy_from_dict_foreign():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.get_iam_policy(request={
+            'resource': 'resource_value',
+            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_get_iam_policy_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = InstanceAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
+
+        request = {}
+        await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        ))
+        response = await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+    await test_test_iam_permissions_async(request_type=dict)
+
+def test_test_iam_permissions_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + arg = args[0].permissions + mock_val = ['permissions_value'] + assert arg == mock_val + + +def test_test_iam_permissions_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.test_iam_permissions(
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+        arg = args[0].permissions
+        mock_val = ['permissions_value']
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.GetInstancePartitionRequest,
+    dict,
+])
+def test_get_instance_partition(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.InstancePartition(
+            name='name_value',
+            config='config_value',
+            display_name='display_name_value',
+            state=spanner_instance_admin.InstancePartition.State.CREATING,
+            referencing_databases=['referencing_databases_value'],
+            referencing_backups=['referencing_backups_value'],
+            etag='etag_value',
+            node_count=1070,
+        )
+        response = client.get_instance_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.GetInstancePartitionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner_instance_admin.InstancePartition) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.state == spanner_instance_admin.InstancePartition.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.referencing_backups == ['referencing_backups_value'] + assert response.etag == 'etag_value' + + +def test_get_instance_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.GetInstancePartitionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstancePartitionRequest( + name='name_value', + ) + +def test_get_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc + request = {} + client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_instance_partition(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = InstanceAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_instance_partition in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_instance_partition] = mock_rpc
+
+        request = {}
+        await client.get_instance_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_instance_partition(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstancePartitionRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition(
+            name='name_value',
+            config='config_value',
+            display_name='display_name_value',
+            state=spanner_instance_admin.InstancePartition.State.CREATING,
+            referencing_databases=['referencing_databases_value'],
+            referencing_backups=['referencing_backups_value'],
+            etag='etag_value',
+        ))
+        response = await client.get_instance_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.GetInstancePartitionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner_instance_admin.InstancePartition) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.state == spanner_instance_admin.InstancePartition.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.referencing_backups == ['referencing_backups_value'] + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_instance_partition_async_from_dict(): + await test_get_instance_partition_async(request_type=dict) + +def test_get_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value = spanner_instance_admin.InstancePartition() + client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition()) + await client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_instance_partition_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.InstancePartition() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance_partition( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_instance_partition_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_instance_partition(
+            spanner_instance_admin.GetInstancePartitionRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_instance_partition(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_instance_partition(
+            spanner_instance_admin.GetInstancePartitionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.CreateInstancePartitionRequest,
+    dict,
+])
+def test_create_instance_partition(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_instance_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.CreateInstancePartitionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_instance_partition_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.CreateInstancePartitionRequest( + parent='parent_value', + instance_partition_id='instance_partition_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstancePartitionRequest( + parent='parent_value', + instance_partition_id='instance_partition_id_value', + ) + +def test_create_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc + request = {} + client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance_partition] = mock_rpc + + request = {} + await client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstancePartitionRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_partition_async_from_dict(): + await test_create_instance_partition_async(request_type=dict) + +def test_create_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstancePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstancePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_create_instance_partition_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_instance_partition(
+            parent='parent_value',
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            instance_partition_id='instance_partition_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].instance_partition
+        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
+        assert arg == mock_val
+        arg = args[0].instance_partition_id
+        mock_val = 'instance_partition_id_value'
+        assert arg == mock_val
+
+
+def test_create_instance_partition_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_instance_partition(
+            spanner_instance_admin.CreateInstancePartitionRequest(),
+            parent='parent_value',
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            instance_partition_id='instance_partition_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_instance_partition_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_instance_partition(
+            parent='parent_value',
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            instance_partition_id='instance_partition_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_partition + mock_val = spanner_instance_admin.InstancePartition(name='name_value') + assert arg == mock_val + arg = args[0].instance_partition_id + mock_val = 'instance_partition_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_instance_partition_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance_partition( + spanner_instance_admin.CreateInstancePartitionRequest(), + parent='parent_value', + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + instance_partition_id='instance_partition_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.DeleteInstancePartitionRequest, + dict, +]) +def test_delete_instance_partition(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_instance_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.DeleteInstancePartitionRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstancePartitionRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc + request = {} + client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance_partition] = mock_rpc + + request = {} + await client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstancePartitionRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_instance_partition_async_from_dict(): + await test_delete_instance_partition_async(request_type=dict) + +def test_delete_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value = None + client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_instance_partition_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance_partition( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_instance_partition_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_instance_partition(
+            spanner_instance_admin.DeleteInstancePartitionRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_instance_partition_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_instance_partition(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_instance_partition_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_instance_partition(
+            spanner_instance_admin.DeleteInstancePartitionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstancePartitionRequest,
+    dict,
+])
+def test_update_instance_partition(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_instance_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.UpdateInstancePartitionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_update_instance_partition_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.UpdateInstancePartitionRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstancePartitionRequest( + ) + +def test_update_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc + request = {} + client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_instance_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_instance_partition] = mock_rpc + + request = {} + await client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstancePartitionRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_partition_async_from_dict(): + await test_update_instance_partition_async(request_type=dict) + +def test_update_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstancePartitionRequest() + + request.instance_partition.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance_partition.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstancePartitionRequest() + + request.instance_partition.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
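+        # (FakeUnaryUnaryCall wraps the canned value in an awaitable,
+        # standing in for the call object a real async gRPC stub returns.)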
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'instance_partition.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_instance_partition_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_instance_partition(
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_partition
+        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
+        assert arg == mock_val
+        arg = args[0].field_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_instance_partition_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_instance_partition(
+            spanner_instance_admin.UpdateInstancePartitionRequest(),
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_instance_partition_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_instance_partition(
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_partition
+        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
+        assert arg == mock_val
+        arg = args[0].field_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_instance_partition_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
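+    # (A request object and flattened keyword arguments are mutually
+    # exclusive, so the client raises ValueError before touching the
+    # transport.)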
+ with pytest.raises(ValueError): + await client.update_instance_partition( + spanner_instance_admin.UpdateInstancePartitionRequest(), + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancePartitionOperationsRequest, + dict, +]) +def test_list_instance_partition_operations(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse( + next_page_token='next_page_token_value', + unreachable_instance_partitions=['unreachable_instance_partitions_value'], + ) + response = client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstancePartitionOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancePartitionOperationsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value'] + + +def test_list_instance_partition_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstancePartitionOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
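+        # (The response itself is never inspected here; the assertion below
+        # only checks that the request proto round-trips with its explicitly
+        # populated string fields intact.)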
+ client.list_instance_partition_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancePartitionOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_instance_partition_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc + request = {} + client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instance_partition_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_partition_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instance_partition_operations in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instance_partition_operations] = mock_rpc + + request = {} + await client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instance_partition_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_partition_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
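+    # Patching __call__ on the multicallable's type intercepts the RPC at
+    # the stub boundary, so no channel traffic ever occurs.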
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
+        ))
+        response = await client.list_instance_partition_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstancePartitionOperationsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_async_from_dict():
+    await test_list_instance_partition_operations_async(request_type=dict)
+
+def test_list_instance_partition_operations_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
+        client.list_instance_partition_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
+        await client.list_instance_partition_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_instance_partition_operations_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
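+    # The flattened `parent=` keyword used below is folded into a full
+    # ListInstancePartitionOperationsRequest before the stub is invoked.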
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_instance_partition_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_instance_partition_operations_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_partition_operations(
+            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instance_partition_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instance_partition_operations(
+            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_partition_operations_pager(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
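+        # (Giving side_effect an iterable makes each successive stub call
+        # return the next page; the trailing RuntimeError is a sentinel that
+        # would surface if the pager requested one page too many.)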
+        call.side_effect = (
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_instance_partition_operations(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                   for i in results)
+
+def test_list_instance_partition_operations_pages(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instance_partition_operations(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_async_pager():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
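+        # (new_callable=mock.AsyncMock makes the patched __call__ awaitable,
+        # so no FakeUnaryUnaryCall wrapper is needed for the paged responses.)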
+ call.side_effect = ( + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_partition_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_partition_operations_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_partition_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.MoveInstanceRequest, + dict, +]) +def test_move_instance(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. 
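+        # (move_instance is a long-running operation, so the client wraps
+        # the mocked Operation proto in an api_core future; see the
+        # isinstance check below.)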
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.MoveInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_move_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.MoveInstanceRequest( + name='name_value', + target_config='target_config_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.move_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.MoveInstanceRequest( + name='name_value', + target_config='target_config_value', + ) + +def test_move_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc + request = {} + client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_move_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.move_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.move_instance] = mock_rpc + + request = {} + await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_move_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.MoveInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.MoveInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_move_instance_async_from_dict(): + await test_move_instance_async(request_type=dict) + +def test_move_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.MoveInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
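+    # The `request.name` set above should come back as an
+    # 'x-goog-request-params' metadata entry of 'name=name_value'.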
+ with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_move_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.MoveInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_list_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc + + request = {} + client.list_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
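+        # (_wrapped_methods maps each transport method to its wrapped
+        # retry/timeout-aware callable, so swapping the dict entry above is
+        # enough to intercept the RPC.)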
+ assert mock_rpc.call_count == 1 + + client.list_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instance_configs_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
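+            # (path_template.transcode normally maps the request onto the
+            # http rule's URI, method, and body; stubbing it keeps the test
+            # independent of the real http_options.)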
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_instance_configs(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_instance_configs_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_instance_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_instance_configs_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstanceConfigsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_instance_configs(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1])
+
+
+def test_list_instance_configs_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_configs(
+            spanner_instance_admin.ListInstanceConfigsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_configs_rest_pager(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstanceConfigsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1'}
+
+        pager = client.list_instance_configs(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.InstanceConfig)
+                   for i in results)
+
+        pages = list(client.list_instance_configs(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_instance_config_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_instance_config in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc
+
+        request = {}
+        client.get_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.get_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_config_rest_required_fields(request_type=spanner_instance_admin.GetInstanceConfigRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstanceConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_instance_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_config_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_instance_config_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
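+    # Patching the transport session's `request` lets the test hand back a
+    # canned Response whose JSON body is decoded into the expected proto.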
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstanceConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_instance_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) + + +def test_get_instance_config_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance_config( + spanner_instance_admin.GetInstanceConfigRequest(), + name='name_value', + ) + + +def test_create_instance_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc + + request = {} + client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_config_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceConfigRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceConfigId"] = 'instance_config_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceConfigId" in jsonified_request + assert jsonified_request["instanceConfigId"] == 'instance_config_id_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
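+            # (create_instance_config maps to an http POST with a body, so
+            # the stubbed transcode result below carries the request proto
+            # as 'body' as well.)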
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_instance_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_config_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "instanceConfigId", "instanceConfig", ))) + + +def test_create_instance_config_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + instance_config_id='instance_config_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_instance_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1]) + + +def test_create_instance_config_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.create_instance_config(
+            spanner_instance_admin.CreateInstanceConfigRequest(),
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+
+
+def test_update_instance_config_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_instance_config in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc
+
+        request = {}
+        client.update_instance_config(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.update_instance_config(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_update_instance_config_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceConfigRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
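+            # UpdateInstanceConfig's required fields (instance_config, update_mask)
+            # are message-typed, which is why request_init above stays empty.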
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.update_instance_config(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_instance_config_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_instance_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("instanceConfig", "updateMask", )))
+
+
+def test_update_instance_config_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.update_instance_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{instance_config.name=projects/*/instanceConfigs/*}" % client.transport._host, args[1])
+
+
+def test_update_instance_config_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_instance_config(
+            spanner_instance_admin.UpdateInstanceConfigRequest(),
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+def test_delete_instance_config_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_instance_config in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc
+
+        request = {}
+        client.delete_instance_config(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_instance_config(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_delete_instance_config_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceConfigRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("etag", "validate_only", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
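+            # DeleteInstanceConfig maps to http DELETE with no request body, so
+            # the stubbed transcode result below carries no 'body' key.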
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ''
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.delete_instance_config(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_instance_config_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.delete_instance_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", )))
+
+
+def test_delete_instance_config_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.delete_instance_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1])
+
+
+def test_delete_instance_config_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_instance_config(
+            spanner_instance_admin.DeleteInstanceConfigRequest(),
+            name='name_value',
+        )
+
+
+def test_list_instance_config_operations_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_instance_config_operations in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
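+        # Swapping the mock into the transport's wrapped-method cache makes the
+        # client-level call below dispatch to it instead of the real transport method.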
+        client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc
+
+        request = {}
+        client.list_instance_config_operations(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_instance_config_operations(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_instance_config_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("filter", "page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_instance_config_operations(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_instance_config_operations_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_instance_config_operations._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_instance_config_operations_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_instance_config_operations(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigOperations" % client.transport._host, args[1])
+
+
+def test_list_instance_config_operations_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_config_operations(
+            spanner_instance_admin.ListInstanceConfigOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_config_operations_rest_pager(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
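+    # The pager test queues several fake page responses on req.side_effect;
+    # each paged request the client makes consumes the next response in order.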
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1'}
+
+        pager = client.list_instance_config_operations(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                   for i in results)
+
+        pages = list(client.list_instance_config_operations(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_list_instances_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_instances in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc
+
+        request = {}
+        client.list_instances(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_instances(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_instances_rest_required_fields(request_type=spanner_instance_admin.ListInstancesRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("filter", "instance_deadline", "page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = spanner_instance_admin.ListInstancesResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_instances(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_instances_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_instances._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "instanceDeadline", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_instances_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstancesResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_instances(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1])
+
+
+def test_list_instances_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instances(
+            spanner_instance_admin.ListInstancesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instances_rest_pager(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
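+        # Each fake page below is serialized with to_json and replayed as a raw
+        # http body, so the client exercises its normal response-parsing path.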
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstancesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1'}
+
+        pager = client.list_instances(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.Instance)
+                   for i in results)
+
+        pages = list(client.list_instances(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_list_instance_partitions_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_instance_partitions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc
+
+        request = {}
+        client.list_instance_partitions(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_instance_partitions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_list_instance_partitions_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionsRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("instance_partition_deadline", "page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = spanner_instance_admin.ListInstancePartitionsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_instance_partitions(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_instance_partitions_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_instance_partitions._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_instance_partitions_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstancePartitionsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_instance_partitions(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1])
+
+
+def test_list_instance_partitions_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_partitions(
+            spanner_instance_admin.ListInstancePartitionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_partitions_rest_pager(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstancePartitionsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        pager = client.list_instance_partitions(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.InstancePartition)
+                   for i in results)
+
+        pages = list(client.list_instance_partitions(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_instance_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_instance in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc
+
+        request = {}
+        client.get_instance(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_instance(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_instance_rest_required_fields(request_type=spanner_instance_admin.GetInstanceRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("field_mask", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = spanner_instance_admin.Instance()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.Instance.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_instance(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_instance_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_instance._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("fieldMask", )) & set(("name", )))
+
+
+def test_get_instance_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
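+    # Flattened-call tests assert on the rendered URI: sample_request supplies
+    # the path parameters and path_template.validate checks the result against
+    # the method's http rule template.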
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.Instance()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'projects/sample1/instances/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.Instance.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.get_instance(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1])
+
+
+def test_get_instance_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_instance(
+            spanner_instance_admin.GetInstanceRequest(),
+            name='name_value',
+        )
+
+
+def test_create_instance_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_instance in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc
+
+        request = {}
+        client.create_instance(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_instance(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_create_instance_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request_init["instance_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+    jsonified_request["instanceId"] = 'instance_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+    assert "instanceId" in jsonified_request
+    assert jsonified_request["instanceId"] == 'instance_id_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.create_instance(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_instance_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_instance._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "instanceId", "instance", )))
+
+
+def test_create_instance_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            instance_id='instance_id_value',
+            instance=spanner_instance_admin.Instance(name='name_value'),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.create_instance(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1])
+
+
+def test_create_instance_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_instance(
+            spanner_instance_admin.CreateInstanceRequest(),
+            parent='parent_value',
+            instance_id='instance_id_value',
+            instance=spanner_instance_admin.Instance(name='name_value'),
+        )
+
+
+def test_update_instance_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_instance in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc
+
+        request = {}
+        client.update_instance(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods build a cached wrapper on first rpc call
+        # subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.update_instance(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_update_instance_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.update_instance(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_instance_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.update_instance._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("instance", "fieldMask", )))
+
+
+def test_update_instance_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'instance': {'name': 'projects/sample1/instances/sample2'}}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            instance=spanner_instance_admin.Instance(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.update_instance(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{instance.name=projects/*/instances/*}" % client.transport._host, args[1])
+
+
+def test_update_instance_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_instance(
+            spanner_instance_admin.UpdateInstanceRequest(),
+            instance=spanner_instance_admin.Instance(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+def test_delete_instance_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_instance in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
+        client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc
+
+        request = {}
+        client.delete_instance(request)
+
+        # Establish that the underlying stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_instance(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_delete_instance_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_instance_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1]) + + +def test_delete_instance_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + spanner_instance_admin.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
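+        # _wrapped_methods maps each bound transport method to the callable
+        # produced by gapic_v1.method.wrap_method (which layers retry, timeout,
+        # and metadata handling on top of the raw RPC). Installing a bare
+        # mock.Mock() in its place short-circuits the HTTP layer, so the call
+        # counts below observe the cached wrapper being reused, not rebuilt.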
+ client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) + + +def test_set_iam_policy_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
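+    # requests' verb helpers (get, post, patch, ...) all delegate to
+    # Session.request, so patching 'request' on the transport's own session
+    # intercepts every REST call regardless of HTTP verb; the fake Response
+    # assembled below is exactly what the transport will parse.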
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", ))) + + +def test_get_iam_policy_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + jsonified_request["permissions"] = 'permissions_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == 'permissions_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) + + +def test_test_iam_permissions_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + permissions=['permissions_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +def test_get_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc + + request = {} + client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_partition_rest_required_fields(request_type=spanner_instance_admin.GetInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstancePartition() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstancePartition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstancePartition() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstancePartition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) + + +def test_get_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance_partition( + spanner_instance_admin.GetInstancePartitionRequest(), + name='name_value', + ) + + +def test_create_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc + + request = {} + client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_partition_rest_required_fields(request_type=spanner_instance_admin.CreateInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_partition_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instancePartitionId"] = 'instance_partition_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instancePartitionId" in jsonified_request + assert jsonified_request["instancePartitionId"] == 'instance_partition_id_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
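+        # Concretely: with the required fields left as empty placeholder
+        # strings, the real transcode() would find no http rule whose path
+        # template (e.g. 'v1/{parent=projects/*/instances/*}/instancePartitions')
+        # matches, and would raise ValueError before the mocked session is
+        # ever reached.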
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "instancePartitionId", "instancePartition", ))) + + +def test_create_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + instance_partition_id='instance_partition_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1]) + + +def test_create_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance_partition( + spanner_instance_admin.CreateInstancePartitionRequest(), + parent='parent_value', + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + instance_partition_id='instance_partition_id_value', + ) + + +def test_delete_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc + + request = {} + client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_partition_rest_required_fields(request_type=spanner_instance_admin.DeleteInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) + + +def test_delete_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_instance_partition( + spanner_instance_admin.DeleteInstancePartitionRequest(), + name='name_value', + ) + + +def test_update_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc + + request = {} + client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_partition_rest_required_fields(request_type=spanner_instance_admin.UpdateInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instancePartition", "fieldMask", ))) + + +def test_update_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) + + +def test_update_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance_partition( + spanner_instance_admin.UpdateInstancePartitionRequest(), + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_list_instance_partition_operations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc + + request = {} + client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instance_partition_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instance_partition_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "instance_partition_deadline", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_instance_partition_operations(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instance_partition_operations_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instance_partition_operations._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_instance_partition_operations_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_instance_partition_operations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitionOperations" % client.transport._host, args[1]) + + +def test_list_instance_partition_operations_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_instance_partition_operations(
+            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_partition_operations_rest_pager(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        pager = client.list_instance_partition_operations(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                   for i in results)
+
+        pages = list(client.list_instance_partition_operations(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_move_instance_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.move_instance in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc
+
+        request = {}
+        client.move_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_move_instance_rest_required_fields(request_type=spanner_instance_admin.MoveInstanceRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request_init["target_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["targetConfig"] = 'target_config_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "targetConfig" in jsonified_request + assert jsonified_request["targetConfig"] == 'target_config_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.move_instance(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_move_instance_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.move_instance._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", "targetConfig", )))
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = InstanceAdminClient(transport=transport)
+    assert client.transport is transport
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
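+    # Both the sync and asyncio gRPC transports expose their underlying channel
+    # through the grpc_channel property, which is checked below.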
+ transport = transports.InstanceAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.InstanceAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, + transports.InstanceAdminRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = InstanceAdminClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_configs_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() + client.list_instance_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + call.return_value = spanner_instance_admin.InstanceConfig() + client.get_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_config(request=None) + + # Establish that the underlying stub method was called. 
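+        # (request=None makes the client build a default CreateInstanceConfigRequest,
+        # which is what the transport stub should receive.)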
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + call.return_value = None + client.delete_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_config_operations_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() + client.list_instance_config_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancesResponse() + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
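+# (The same default-request check is repeated below for each RPC on the surface.)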
+def test_list_instance_partitions_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + client.list_instance_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = spanner_instance_admin.Instance() + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = None + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value = spanner_instance_admin.InstancePartition() + client.get_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value = None + client.delete_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_partition_operations_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() + client.list_instance_partition_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_move_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.move_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.MoveInstanceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = InstanceAdminAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_configs_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse( + next_page_token='next_page_token_value', + )) + await client.list_instance_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, + base_config='base_config_value', + etag='etag_value', + leader_options=['leader_options_value'], + reconciling=True, + state=spanner_instance_admin.InstanceConfig.State.CREATING, + free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, + quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, + storage_limit_per_processing_unit=3540, + )) + await client.get_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
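+        # (FakeUnaryUnaryCall wraps the message in an awaitable so the mocked
+        # stub behaves like a real gRPC asyncio unary-unary call.)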
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_config_operations_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse( + next_page_token='next_page_token_value', + )) + await client.list_instance_config_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_instances_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_partitions_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_instance_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, + endpoint_uris=['endpoint_uris_value'], + edition=spanner_instance_admin.Instance.Edition.STANDARD, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, + )) + await client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_instance_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + await client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_partition_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition( + name='name_value', + config='config_value', + display_name='display_name_value', + state=spanner_instance_admin.InstancePartition.State.CREATING, + referencing_databases=['referencing_databases_value'], + referencing_backups=['referencing_backups_value'], + etag='etag_value', + )) + await client.get_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_partition_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_partition_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_partition_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_partition_operations_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse( + next_page_token='next_page_token_value', + unreachable_instance_partitions=['unreachable_instance_partitions_value'], + )) + await client.list_instance_partition_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_move_instance_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.move_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.MoveInstanceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = InstanceAdminClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_instance_configs_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instance_configs(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstanceConfigsRequest, + dict, +]) +def test_list_instance_configs_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instance_configs(request) + + # Establish that the response is the type that we expect. 
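+    # (The faked JSON body is decoded back into the proto-plus response type;
+    # list RPCs are additionally wrapped in a pager.)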
+ assert isinstance(response, pagers.ListInstanceConfigsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instance_configs_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.ListInstanceConfigsRequest.pb(spanner_instance_admin.ListInstanceConfigsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.ListInstanceConfigsResponse.to_json(spanner_instance_admin.ListInstanceConfigsResponse()) + req.return_value.content = return_value + + request = spanner_instance_admin.ListInstanceConfigsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.ListInstanceConfigsResponse() + post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigsResponse(), metadata + + client.list_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_config_rest_bad_request(request_type=spanner_instance_admin.GetInstanceConfigRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
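+    # (A 400 status from the mocked Session.request should surface to the caller
+    # as core_exceptions.BadRequest.)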
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance_config(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceConfigRequest, + dict, +]) +def test_get_instance_config_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, + base_config='base_config_value', + etag='etag_value', + leader_options=['leader_options_value'], + reconciling=True, + state=spanner_instance_admin.InstanceConfig.State.CREATING, + free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, + quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, + storage_limit_per_processing_unit=3540, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_instance_admin.InstanceConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED + assert response.base_config == 'base_config_value' + assert response.etag == 'etag_value' + assert response.leader_options == ['leader_options_value'] + assert response.reconciling is True + assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING + assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE + assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION + assert response.storage_limit_per_processing_unit == 3540 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_config_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.GetInstanceConfigRequest.pb(spanner_instance_admin.GetInstanceConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.InstanceConfig.to_json(spanner_instance_admin.InstanceConfig()) + req.return_value.content = return_value + + request = spanner_instance_admin.GetInstanceConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.InstanceConfig() + post_with_metadata.return_value = spanner_instance_admin.InstanceConfig(), metadata + + client.get_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_instance_config_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceConfigRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_instance_config(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.CreateInstanceConfigRequest,
+    dict,
+])
+def test_create_instance_config_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_instance_config(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_instance_config_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+    )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.CreateInstanceConfigRequest.pb(spanner_instance_admin.CreateInstanceConfigRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.CreateInstanceConfigRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_instance_config_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceConfigRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_instance_config(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstanceConfigRequest,
+    dict,
+])
+def test_update_instance_config_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_instance_config(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_instance_config_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    # Patch out the interceptor hooks so the test can verify the call order:
+    # pre_* rewrites the request and metadata before the HTTP call, while
+    # post_* and post_*_with_metadata rewrite the response afterwards.
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.UpdateInstanceConfigRequest.pb(spanner_instance_admin.UpdateInstanceConfigRequest())
+        # The stubbed transcode result stands in for the real HTTP rule, so
+        # the transport never needs a routable URI.
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.UpdateInstanceConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_instance_config_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceConfigRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instanceConfigs/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_instance_config(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.DeleteInstanceConfigRequest,
+    dict,
+])
+def test_delete_instance_config_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instanceConfigs/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
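+    # Empty-response convention: RPCs that map to google.protobuf.Empty are
+    # surfaced by the generated client as None, so the faked HTTP response
+    # below carries an empty body and the test asserts `response is None`.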
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_instance_config(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_config_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_config") as pre: + pre.assert_not_called() + pb_message = spanner_instance_admin.DeleteInstanceConfigRequest.pb(spanner_instance_admin.DeleteInstanceConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner_instance_admin.DeleteInstanceConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_list_instance_config_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instance_config_operations(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstanceConfigOperationsRequest, + dict, +]) +def test_list_instance_config_operations_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
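+    # Paginated-response convention: the proto-plus response below is converted
+    # to its raw protobuf form via .pb() before json_format.MessageToJson can
+    # serialize it, and the client wraps the parsed payload in a pager that
+    # exposes next_page_token while fetching further pages on demand.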
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_instance_config_operations(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstanceConfigOperationsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_instance_config_operations_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_config_operations") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(spanner_instance_admin.ListInstanceConfigOperationsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(spanner_instance_admin.ListInstanceConfigOperationsResponse())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
+        post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse(), metadata
+
+        client.list_instance_config_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_instances_rest_bad_request(request_type=spanner_instance_admin.ListInstancesRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_instances(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.ListInstancesRequest,
+    dict,
+])
+def test_list_instances_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstancesResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_instances(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.ListInstancesRequest.pb(spanner_instance_admin.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.ListInstancesResponse.to_json(spanner_instance_admin.ListInstancesResponse()) + req.return_value.content = return_value + + request = spanner_instance_admin.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.ListInstancesResponse() + post_with_metadata.return_value = spanner_instance_admin.ListInstancesResponse(), metadata + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_instance_partitions_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
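+    # Error-mapping convention: patching requests' Session.request at the class
+    # level intercepts the transport's HTTP call, and the 400 status on the
+    # faked response is translated by google.api_core's exception mapping into
+    # the BadRequest raised below.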
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instance_partitions(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancePartitionsRequest, + dict, +]) +def test_list_instance_partitions_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instance_partitions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancePartitionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instance_partitions_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partitions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.ListInstancePartitionsRequest.pb(spanner_instance_admin.ListInstancePartitionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.ListInstancePartitionsResponse.to_json(spanner_instance_admin.ListInstancePartitionsResponse()) + req.return_value.content = return_value + + request = spanner_instance_admin.ListInstancePartitionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionsResponse(), metadata + + client.list_instance_partitions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=spanner_instance_admin.GetInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
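+    # Transcoding note: request_init above supplies a resource name matching
+    # the RPC's URI template (here projects/*/instances/*), which is what lets
+    # request transcoding succeed before the mocked HTTP call is reached.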
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceRequest, + dict, +]) +def test_get_instance_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, + endpoint_uris=['endpoint_uris_value'], + edition=spanner_instance_admin.Instance.Edition.STANDARD, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED + assert response.endpoint_uris == ['endpoint_uris_value'] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD + assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.GetInstanceRequest.pb(spanner_instance_admin.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.Instance.to_json(spanner_instance_admin.Instance()) + req.return_value.content = return_value + + request = spanner_instance_admin.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.Instance() + post_with_metadata.return_value = spanner_instance_admin.Instance(), metadata + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_instance_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.CreateInstanceRequest,
+    dict,
+])
+def test_create_instance_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_instance_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.CreateInstanceRequest.pb(spanner_instance_admin.CreateInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.CreateInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_instance_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstanceRequest,
+    dict,
+])
+def test_update_instance_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_instance_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.UpdateInstanceRequest.pb(spanner_instance_admin.UpdateInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.UpdateInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_instance_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.DeleteInstanceRequest,
+    dict,
+])
+def test_delete_instance_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + pb_message = spanner_instance_admin.DeleteInstanceRequest.pb(spanner_instance_admin.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner_instance_admin.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
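+        # (policy_pb2.Policy is a plain protobuf message rather than a
+        # proto-plus type, so the serialization below uses MessageToJson
+        # directly with no intermediate .pb() conversion.)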
+ return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.SetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value.content = return_value + + request = iam_policy_pb2.SetIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value.content = return_value + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) + req.return_value.content = return_value + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_partition_rest_bad_request(request_type=spanner_instance_admin.GetInstancePartitionRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance_partition(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstancePartitionRequest, + dict, +]) +def test_get_instance_partition_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
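+    # The InstancePartition payload below exercises an enum field (state) and
+    # the node_count/processing_units oneof; only the member actually set on
+    # the message (node_count here) can be asserted on the parsed response.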
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.InstancePartition(
+            name='name_value',
+            config='config_value',
+            display_name='display_name_value',
+            state=spanner_instance_admin.InstancePartition.State.CREATING,
+            referencing_databases=['referencing_databases_value'],
+            referencing_backups=['referencing_backups_value'],
+            etag='etag_value',
+            node_count=1070,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.InstancePartition.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_instance_partition(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_instance_admin.InstancePartition)
+    assert response.name == 'name_value'
+    assert response.config == 'config_value'
+    assert response.display_name == 'display_name_value'
+    assert response.state == spanner_instance_admin.InstancePartition.State.CREATING
+    assert response.referencing_databases == ['referencing_databases_value']
+    assert response.referencing_backups == ['referencing_backups_value']
+    assert response.etag == 'etag_value'
+    assert response.node_count == 1070
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_instance_partition_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_partition") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.GetInstancePartitionRequest.pb(spanner_instance_admin.GetInstancePartitionRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner_instance_admin.InstancePartition.to_json(spanner_instance_admin.InstancePartition())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.GetInstancePartitionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner_instance_admin.InstancePartition()
+        post_with_metadata.return_value = spanner_instance_admin.InstancePartition(), metadata
+
+        client.get_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_create_instance_partition_rest_bad_request(request_type=spanner_instance_admin.CreateInstancePartitionRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_instance_partition(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.CreateInstancePartitionRequest,
+    dict,
+])
+def test_create_instance_partition_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_instance_partition(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_instance_partition_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_partition") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.CreateInstancePartitionRequest.pb(spanner_instance_admin.CreateInstancePartitionRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.CreateInstancePartitionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_delete_instance_partition_rest_bad_request(request_type=spanner_instance_admin.DeleteInstancePartitionRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_instance_partition(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.DeleteInstancePartitionRequest,
+    dict,
+])
+def test_delete_instance_partition_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_instance_partition(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_instance_partition_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_partition") as pre:
+        pre.assert_not_called()
+        pb_message = spanner_instance_admin.DeleteInstancePartitionRequest.pb(spanner_instance_admin.DeleteInstancePartitionRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        request = spanner_instance_admin.DeleteInstancePartitionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+
+
+def test_update_instance_partition_rest_bad_request(request_type=spanner_instance_admin.UpdateInstancePartitionRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_instance_partition(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstancePartitionRequest,
+    dict,
+])
+def test_update_instance_partition_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_instance_partition(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_instance_partition_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_partition") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.UpdateInstancePartitionRequest.pb(spanner_instance_admin.UpdateInstancePartitionRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.UpdateInstancePartitionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_instance_partition_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_instance_partition_operations(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.ListInstancePartitionOperationsRequest,
+    dict,
+])
+def test_list_instance_partition_operations_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_instance_partition_operations(request)
+
+    # Establish that the response is the type that we expect.
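+    # list_* methods wrap the raw response message in a pager that exposes
+    # the repeated fields and fetches further pages lazily.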
+    assert isinstance(response, pagers.ListInstancePartitionOperationsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_instance_partition_operations_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partition_operations") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(spanner_instance_admin.ListInstancePartitionOperationsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(spanner_instance_admin.ListInstancePartitionOperationsResponse())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
+        post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(), metadata
+
+        client.list_instance_partition_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_move_instance_rest_bad_request(request_type=spanner_instance_admin.MoveInstanceRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.move_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.MoveInstanceRequest,
+    dict,
+])
+def test_move_instance_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.move_instance(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_move_instance_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+        )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance") as post, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_move_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.MoveInstanceRequest.pb(spanner_instance_admin.MoveInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.MoveInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.move_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.cancel_operation(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    operations_pb2.CancelOperationRequest,
+    dict,
+])
+def test_cancel_operation_rest(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = '{}'
+        response_value.content = json_return_value.encode('UTF-8')
+
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        response = client.cancel_operation(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_operation(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    operations_pb2.DeleteOperationRequest,
+    dict,
+])
+def test_delete_operation_rest(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
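+    # The operations mixin shares the HTTP error mapping of the generated
+    # RPCs, so a 400 status raises BadRequest here as well.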
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_configs_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + client.list_instance_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_config_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + client.get_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_instance_config_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + client.create_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_config_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + client.update_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_config_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + client.delete_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_config_operations_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + client.list_instance_config_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_instance_partitions_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + client.list_instance_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_set_iam_policy_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + client.get_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + client.create_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + client.delete_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + client.update_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_partition_operations_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + client.list_instance_partition_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_move_instance_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + client.move_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.MoveInstanceRequest() + + assert args[0] == request_msg + + +def test_instance_admin_rest_lro_client(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
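+    # Note: no explicit transport argument is passed below.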
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.InstanceAdminGrpcTransport, + ) + +def test_instance_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_instance_admin_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InstanceAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_instance_configs', + 'get_instance_config', + 'create_instance_config', + 'update_instance_config', + 'delete_instance_config', + 'list_instance_config_operations', + 'list_instances', + 'list_instance_partitions', + 'get_instance', + 'create_instance', + 'update_instance', + 'delete_instance', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_instance_partition', + 'create_instance_partition', + 'delete_instance_partition', + 'update_instance_partition', + 'list_instance_partition_operations', + 'move_instance', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instance_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id="octopus", + ) + + +def test_instance_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
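+    # google.auth.default is patched below, so no real ADC lookup occurs.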
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceAdminTransport() + adc.assert_called_once() + + +def test_instance_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, + ], +) +def test_instance_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, + transports.InstanceAdminRestTransport, + ], +) +def test_instance_admin_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.InstanceAdminGrpcTransport, grpc_helpers), + (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_instance_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
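+    # create_channel is patched as well, so the expected dial arguments can
+    # be asserted without opening a real gRPC channel.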
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + scopes=["1", "2"], + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) +def test_instance_admin_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_instance_admin_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.InstanceAdminRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_instance_admin_host_no_port(transport_name): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://spanner.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_instance_admin_host_with_port(transport_name): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 
'spanner.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://spanner.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_instance_admin_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InstanceAdminClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstanceAdminClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instance_configs._session + session2 = client2.transport.list_instance_configs._session + assert session1 != session2 + session1 = client1.transport.get_instance_config._session + session2 = client2.transport.get_instance_config._session + assert session1 != session2 + session1 = client1.transport.create_instance_config._session + session2 = client2.transport.create_instance_config._session + assert session1 != session2 + session1 = client1.transport.update_instance_config._session + session2 = client2.transport.update_instance_config._session + assert session1 != session2 + session1 = client1.transport.delete_instance_config._session + session2 = client2.transport.delete_instance_config._session + assert session1 != session2 + session1 = client1.transport.list_instance_config_operations._session + session2 = client2.transport.list_instance_config_operations._session + assert session1 != session2 + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.list_instance_partitions._session + session2 = client2.transport.list_instance_partitions._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.get_instance_partition._session + session2 = client2.transport.get_instance_partition._session + assert session1 != session2 + session1 = client1.transport.create_instance_partition._session + session2 = client2.transport.create_instance_partition._session + assert session1 != session2 + session1 = client1.transport.delete_instance_partition._session + session2 = client2.transport.delete_instance_partition._session + assert session1 != session2 + session1 = client1.transport.update_instance_partition._session + session2 = client2.transport.update_instance_partition._session + assert session1 != session2 + session1 = client1.transport.list_instance_partition_operations._session + session2 = 
client2.transport.list_instance_partition_operations._session
+    assert session1 != session2
+    session1 = client1.transport.move_instance._session
+    session2 = client2.transport.move_instance._session
+    assert session1 != session2
+
+
+def test_instance_admin_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.InstanceAdminGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_instance_admin_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.InstanceAdminGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport])
+def test_instance_admin_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
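+# With api_mtls_endpoint set but client_cert_source=None, the transport falls
+# back to ADC-provided SslCredentials for the mTLS channel.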
+@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) +def test_instance_admin_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_instance_admin_grpc_lro_client(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_instance_admin_grpc_lro_async_client(): + client = InstanceAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_instance_path(): + project = "squid" + instance = "clam" + expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) + actual = InstanceAdminClient.instance_path(project, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "whelk", + "instance": "octopus", + } + path = InstanceAdminClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_instance_path(path) + assert expected == actual + +def test_instance_config_path(): + project = "oyster" + instance_config = "nudibranch" + expected = "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) + actual = InstanceAdminClient.instance_config_path(project, instance_config) + assert expected == actual + + +def test_parse_instance_config_path(): + expected = { + "project": "cuttlefish", + "instance_config": "mussel", + } + path = InstanceAdminClient.instance_config_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceAdminClient.parse_instance_config_path(path) + assert expected == actual + +def test_instance_partition_path(): + project = "winkle" + instance = "nautilus" + instance_partition = "scallop" + expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) + actual = InstanceAdminClient.instance_partition_path(project, instance, instance_partition) + assert expected == actual + + +def test_parse_instance_partition_path(): + expected = { + "project": "abalone", + "instance": "squid", + "instance_partition": "clam", + } + path = InstanceAdminClient.instance_partition_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_instance_partition_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InstanceAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = InstanceAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = InstanceAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = InstanceAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InstanceAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = InstanceAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = InstanceAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = InstanceAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InstanceAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = InstanceAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: + transport_class = InstanceAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 2854ac3793a1d8a7216db4bed1a1792731323d1b Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Thu, 9 Oct 2025 03:03:14 +0000 Subject: [PATCH 2/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../database_admin/transports/base.py | 5 +- .../database_admin/transports/grpc.py | 8 +- .../database_admin/transports/grpc_asyncio.py | 8 +- .../database_admin/transports/rest.py | 5 +- .../instance_admin/transports/base.py | 5 +- .../instance_admin/transports/grpc.py | 8 +- .../instance_admin/transports/grpc_asyncio.py | 8 +- .../instance_admin/transports/rest.py | 5 +- .../services/spanner/transports/base.py | 5 +- .../services/spanner/transports/grpc.py | 8 +- .../spanner/transports/grpc_asyncio.py | 8 
+- .../services/spanner/transports/rest.py | 5 +- owl-bot-staging/spanner/v1/.coveragerc | 13 - owl-bot-staging/spanner/v1/.flake8 | 34 - owl-bot-staging/spanner/v1/LICENSE | 202 - owl-bot-staging/spanner/v1/MANIFEST.in | 20 - owl-bot-staging/spanner/v1/README.rst | 143 - .../spanner/v1/docs/_static/custom.css | 20 - .../spanner/v1/docs/_templates/layout.html | 50 - owl-bot-staging/spanner/v1/docs/conf.py | 385 - owl-bot-staging/spanner/v1/docs/index.rst | 10 - .../spanner/v1/docs/multiprocessing.rst | 7 - .../spanner/v1/docs/spanner_v1/services_.rst | 6 - .../spanner/v1/docs/spanner_v1/spanner.rst | 10 - .../spanner/v1/docs/spanner_v1/types_.rst | 6 - .../v1/google/cloud/spanner/__init__.py | 113 - .../v1/google/cloud/spanner/gapic_version.py | 16 - .../spanner/v1/google/cloud/spanner/py.typed | 2 - .../v1/google/cloud/spanner_v1/__init__.py | 114 - .../cloud/spanner_v1/gapic_metadata.json | 268 - .../google/cloud/spanner_v1/gapic_version.py | 16 - .../v1/google/cloud/spanner_v1/py.typed | 2 - .../cloud/spanner_v1/services/__init__.py | 15 - .../spanner_v1/services/spanner/__init__.py | 22 - .../services/spanner/async_client.py | 2125 -- .../spanner_v1/services/spanner/client.py | 2486 -- .../spanner_v1/services/spanner/pagers.py | 166 - .../services/spanner/transports/README.rst | 9 - .../services/spanner/transports/__init__.py | 38 - .../services/spanner/transports/base.py | 505 - .../services/spanner/transports/grpc.py | 899 - .../spanner/transports/grpc_asyncio.py | 1125 - .../services/spanner/transports/rest.py | 2898 -- .../services/spanner/transports/rest_base.py | 817 - .../google/cloud/spanner_v1/types/__init__.py | 122 - .../cloud/spanner_v1/types/change_stream.py | 683 - .../cloud/spanner_v1/types/commit_response.py | 104 - .../v1/google/cloud/spanner_v1/types/keys.py | 248 - .../google/cloud/spanner_v1/types/mutation.py | 201 - .../cloud/spanner_v1/types/query_plan.py | 219 - .../cloud/spanner_v1/types/result_set.py | 379 - .../google/cloud/spanner_v1/types/spanner.py | 1782 -- .../cloud/spanner_v1/types/transaction.py | 491 - .../v1/google/cloud/spanner_v1/types/type.py | 288 - owl-bot-staging/spanner/v1/mypy.ini | 3 - owl-bot-staging/spanner/v1/noxfile.py | 601 - .../snippet_metadata_google.spanner.v1.json | 2579 -- ...ted_spanner_batch_create_sessions_async.py | 53 - ...ated_spanner_batch_create_sessions_sync.py | 53 - ..._v1_generated_spanner_batch_write_async.py | 57 - ...r_v1_generated_spanner_batch_write_sync.py | 57 - ...nerated_spanner_begin_transaction_async.py | 52 - ...enerated_spanner_begin_transaction_sync.py | 52 - ...anner_v1_generated_spanner_commit_async.py | 53 - ...panner_v1_generated_spanner_commit_sync.py | 53 - ..._generated_spanner_create_session_async.py | 52 - ...1_generated_spanner_create_session_sync.py | 52 - ..._generated_spanner_delete_session_async.py | 50 - ...1_generated_spanner_delete_session_sync.py | 50 - ...nerated_spanner_execute_batch_dml_async.py | 57 - ...enerated_spanner_execute_batch_dml_sync.py | 57 - ..._v1_generated_spanner_execute_sql_async.py | 53 - ...r_v1_generated_spanner_execute_sql_sync.py | 53 - ...ted_spanner_execute_streaming_sql_async.py | 54 - ...ated_spanner_execute_streaming_sql_sync.py | 54 - ..._v1_generated_spanner_get_session_async.py | 52 - ...r_v1_generated_spanner_get_session_sync.py | 52 - ...1_generated_spanner_list_sessions_async.py | 53 - ...v1_generated_spanner_list_sessions_sync.py | 53 - ...generated_spanner_partition_query_async.py | 53 - ..._generated_spanner_partition_query_sync.py | 53 - 
..._generated_spanner_partition_read_async.py | 53 - ...1_generated_spanner_partition_read_sync.py | 53 - ...spanner_v1_generated_spanner_read_async.py | 54 - .../spanner_v1_generated_spanner_read_sync.py | 54 - ...ner_v1_generated_spanner_rollback_async.py | 51 - ...nner_v1_generated_spanner_rollback_sync.py | 51 - ..._generated_spanner_streaming_read_async.py | 55 - ...1_generated_spanner_streaming_read_sync.py | 55 - .../v1/scripts/fixup_spanner_v1_keywords.py | 191 - owl-bot-staging/spanner/v1/setup.py | 101 - .../spanner/v1/testing/constraints-3.10.txt | 8 - .../spanner/v1/testing/constraints-3.11.txt | 8 - .../spanner/v1/testing/constraints-3.12.txt | 8 - .../spanner/v1/testing/constraints-3.13.txt | 12 - .../spanner/v1/testing/constraints-3.14.txt | 12 - .../spanner/v1/testing/constraints-3.7.txt | 12 - .../spanner/v1/testing/constraints-3.8.txt | 8 - .../spanner/v1/testing/constraints-3.9.txt | 8 - owl-bot-staging/spanner/v1/tests/__init__.py | 16 - .../spanner/v1/tests/unit/__init__.py | 16 - .../spanner/v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/spanner_v1/__init__.py | 16 - .../unit/gapic/spanner_v1/test_spanner.py | 11446 -------- .../spanner_admin_database/v1/.coveragerc | 13 - .../spanner_admin_database/v1/.flake8 | 34 - .../spanner_admin_database/v1/LICENSE | 202 - .../spanner_admin_database/v1/MANIFEST.in | 20 - .../spanner_admin_database/v1/README.rst | 143 - .../v1/docs/_static/custom.css | 20 - .../v1/docs/_templates/layout.html | 50 - .../spanner_admin_database/v1/docs/conf.py | 385 - .../spanner_admin_database/v1/docs/index.rst | 10 - .../v1/docs/multiprocessing.rst | 7 - .../database_admin.rst | 10 - .../spanner_admin_database_v1/services_.rst | 6 - .../docs/spanner_admin_database_v1/types_.rst | 6 - .../cloud/spanner_admin_database/__init__.py | 149 - .../spanner_admin_database/gapic_version.py | 16 - .../cloud/spanner_admin_database/py.typed | 2 - .../spanner_admin_database_v1/__init__.py | 150 - .../gapic_metadata.json | 433 - .../gapic_version.py | 16 - .../cloud/spanner_admin_database_v1/py.typed | 2 - .../services/__init__.py | 15 - .../services/database_admin/__init__.py | 22 - .../services/database_admin/async_client.py | 3968 --- .../services/database_admin/client.py | 4387 --- .../services/database_admin/pagers.py | 864 - .../database_admin/transports/README.rst | 9 - .../database_admin/transports/__init__.py | 38 - .../database_admin/transports/base.py | 795 - .../database_admin/transports/grpc.py | 1285 - .../database_admin/transports/grpc_asyncio.py | 1656 -- .../database_admin/transports/rest.py | 5336 ---- .../database_admin/transports/rest_base.py | 1378 - .../types/__init__.py | 148 - .../spanner_admin_database_v1/types/backup.py | 1097 - .../types/backup_schedule.py | 369 - .../spanner_admin_database_v1/types/common.py | 179 - .../types/spanner_database_admin.py | 1348 - .../spanner_admin_database/v1/mypy.ini | 3 - .../spanner_admin_database/v1/noxfile.py | 601 - ...data_google.spanner.admin.database.v1.json | 4480 ---- ...d_database_admin_add_split_points_async.py | 52 - ...ed_database_admin_add_split_points_sync.py | 52 - ...erated_database_admin_copy_backup_async.py | 58 - ...nerated_database_admin_copy_backup_sync.py | 58 - ...ated_database_admin_create_backup_async.py | 57 - ...base_admin_create_backup_schedule_async.py | 53 - ...abase_admin_create_backup_schedule_sync.py | 53 - ...rated_database_admin_create_backup_sync.py | 57 - ...ed_database_admin_create_database_async.py | 57 - ...ted_database_admin_create_database_sync.py 
| 57 - ...ated_database_admin_delete_backup_async.py | 50 - ...base_admin_delete_backup_schedule_async.py | 50 - ...abase_admin_delete_backup_schedule_sync.py | 50 - ...rated_database_admin_delete_backup_sync.py | 50 - ...ated_database_admin_drop_database_async.py | 50 - ...rated_database_admin_drop_database_sync.py | 50 - ...nerated_database_admin_get_backup_async.py | 52 - ...atabase_admin_get_backup_schedule_async.py | 52 - ...database_admin_get_backup_schedule_sync.py | 52 - ...enerated_database_admin_get_backup_sync.py | 52 - ...rated_database_admin_get_database_async.py | 52 - ...d_database_admin_get_database_ddl_async.py | 52 - ...ed_database_admin_get_database_ddl_sync.py | 52 - ...erated_database_admin_get_database_sync.py | 52 - ...ted_database_admin_get_iam_policy_async.py | 53 - ...ated_database_admin_get_iam_policy_sync.py | 53 - ...n_internal_update_graph_operation_async.py | 54 - ...in_internal_update_graph_operation_sync.py | 54 - ...base_admin_list_backup_operations_async.py | 53 - ...abase_admin_list_backup_operations_sync.py | 53 - ...abase_admin_list_backup_schedules_async.py | 53 - ...tabase_admin_list_backup_schedules_sync.py | 53 - ...rated_database_admin_list_backups_async.py | 53 - ...erated_database_admin_list_backups_sync.py | 53 - ...se_admin_list_database_operations_async.py | 53 - ...ase_admin_list_database_operations_sync.py | 53 - ...atabase_admin_list_database_roles_async.py | 53 - ...database_admin_list_database_roles_sync.py | 53 - ...ted_database_admin_list_databases_async.py | 53 - ...ated_database_admin_list_databases_sync.py | 53 - ...d_database_admin_restore_database_async.py | 58 - ...ed_database_admin_restore_database_sync.py | 58 - ...ted_database_admin_set_iam_policy_async.py | 53 - ...ated_database_admin_set_iam_policy_sync.py | 53 - ...tabase_admin_test_iam_permissions_async.py | 54 - ...atabase_admin_test_iam_permissions_sync.py | 54 - ...ated_database_admin_update_backup_async.py | 51 - ...base_admin_update_backup_schedule_async.py | 51 - ...abase_admin_update_backup_schedule_sync.py | 51 - ...rated_database_admin_update_backup_sync.py | 51 - ...ed_database_admin_update_database_async.py | 59 - ...atabase_admin_update_database_ddl_async.py | 57 - ...database_admin_update_database_ddl_sync.py | 57 - ...ted_database_admin_update_database_sync.py | 59 - ...ixup_spanner_admin_database_v1_keywords.py | 202 - .../spanner_admin_database/v1/setup.py | 102 - .../v1/testing/constraints-3.10.txt | 9 - .../v1/testing/constraints-3.11.txt | 9 - .../v1/testing/constraints-3.12.txt | 9 - .../v1/testing/constraints-3.13.txt | 13 - .../v1/testing/constraints-3.7.txt | 13 - .../v1/testing/constraints-3.8.txt | 9 - .../v1/testing/constraints-3.9.txt | 9 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../spanner_admin_database_v1/__init__.py | 16 - .../test_database_admin.py | 22226 ---------------- .../spanner_admin_instance/v1/.coveragerc | 13 - .../spanner_admin_instance/v1/.flake8 | 34 - .../spanner_admin_instance/v1/LICENSE | 202 - .../spanner_admin_instance/v1/MANIFEST.in | 20 - .../spanner_admin_instance/v1/README.rst | 143 - .../v1/docs/_static/custom.css | 20 - .../v1/docs/_templates/layout.html | 50 - .../spanner_admin_instance/v1/docs/conf.py | 385 - .../spanner_admin_instance/v1/docs/index.rst | 10 - .../v1/docs/multiprocessing.rst | 7 - .../instance_admin.rst | 10 - .../spanner_admin_instance_v1/services_.rst | 6 - .../docs/spanner_admin_instance_v1/types_.rst | 6 - 
.../cloud/spanner_admin_instance/__init__.py | 109 - .../spanner_admin_instance/gapic_version.py | 16 - .../cloud/spanner_admin_instance/py.typed | 2 - .../spanner_admin_instance_v1/__init__.py | 110 - .../gapic_metadata.json | 343 - .../gapic_version.py | 16 - .../cloud/spanner_admin_instance_v1/py.typed | 2 - .../services/__init__.py | 15 - .../services/instance_admin/__init__.py | 22 - .../services/instance_admin/async_client.py | 3468 --- .../services/instance_admin/client.py | 3849 --- .../services/instance_admin/pagers.py | 723 - .../instance_admin/transports/README.rst | 9 - .../instance_admin/transports/__init__.py | 38 - .../instance_admin/transports/base.py | 567 - .../instance_admin/transports/grpc.py | 1359 - .../instance_admin/transports/grpc_asyncio.py | 1560 -- .../instance_admin/transports/rest.py | 4462 ---- .../instance_admin/transports/rest_base.py | 1152 - .../types/__init__.py | 104 - .../spanner_admin_instance_v1/types/common.py | 99 - .../types/spanner_instance_admin.py | 2366 -- .../spanner_admin_instance/v1/mypy.ini | 3 - .../spanner_admin_instance/v1/noxfile.py | 601 - ...data_google.spanner.admin.instance.v1.json | 3450 --- ...ed_instance_admin_create_instance_async.py | 63 - ...ance_admin_create_instance_config_async.py | 57 - ...tance_admin_create_instance_config_sync.py | 57 - ...e_admin_create_instance_partition_async.py | 64 - ...ce_admin_create_instance_partition_sync.py | 64 - ...ted_instance_admin_create_instance_sync.py | 63 - ...ed_instance_admin_delete_instance_async.py | 50 - ...ance_admin_delete_instance_config_async.py | 50 - ...tance_admin_delete_instance_config_sync.py | 50 - ...e_admin_delete_instance_partition_async.py | 50 - ...ce_admin_delete_instance_partition_sync.py | 50 - ...ted_instance_admin_delete_instance_sync.py | 50 - ...ted_instance_admin_get_iam_policy_async.py | 53 - ...ated_instance_admin_get_iam_policy_sync.py | 53 - ...rated_instance_admin_get_instance_async.py | 52 - ...nstance_admin_get_instance_config_async.py | 52 - ...instance_admin_get_instance_config_sync.py | 52 - ...ance_admin_get_instance_partition_async.py | 52 - ...tance_admin_get_instance_partition_sync.py | 52 - ...erated_instance_admin_get_instance_sync.py | 52 - ...n_list_instance_config_operations_async.py | 53 - ...in_list_instance_config_operations_sync.py | 53 - ...tance_admin_list_instance_configs_async.py | 53 - ...stance_admin_list_instance_configs_sync.py | 53 - ...ist_instance_partition_operations_async.py | 53 - ...list_instance_partition_operations_sync.py | 53 - ...ce_admin_list_instance_partitions_async.py | 53 - ...nce_admin_list_instance_partitions_sync.py | 53 - ...ted_instance_admin_list_instances_async.py | 53 - ...ated_instance_admin_list_instances_sync.py | 53 - ...ated_instance_admin_move_instance_async.py | 57 - ...rated_instance_admin_move_instance_sync.py | 57 - ...ted_instance_admin_set_iam_policy_async.py | 53 - ...ated_instance_admin_set_iam_policy_sync.py | 53 - ...stance_admin_test_iam_permissions_async.py | 54 - ...nstance_admin_test_iam_permissions_sync.py | 54 - ...ed_instance_admin_update_instance_async.py | 61 - ...ance_admin_update_instance_config_async.py | 55 - ...tance_admin_update_instance_config_sync.py | 55 - ...e_admin_update_instance_partition_async.py | 62 - ...ce_admin_update_instance_partition_sync.py | 62 - ...ted_instance_admin_update_instance_sync.py | 61 - ...ixup_spanner_admin_instance_v1_keywords.py | 196 - .../spanner_admin_instance/v1/setup.py | 102 - .../v1/testing/constraints-3.10.txt | 9 - 
.../v1/testing/constraints-3.11.txt | 9 - .../v1/testing/constraints-3.12.txt | 9 - .../v1/testing/constraints-3.13.txt | 13 - .../v1/testing/constraints-3.14.txt | 13 - .../v1/testing/constraints-3.7.txt | 13 - .../v1/testing/constraints-3.8.txt | 9 - .../v1/testing/constraints-3.9.txt | 9 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../spanner_admin_instance_v1/__init__.py | 16 - .../test_instance_admin.py | 17636 ------------ ...data_google.spanner.admin.database.v1.json | 2 +- ...data_google.spanner.admin.instance.v1.json | 2 +- .../snippet_metadata_google.spanner.v1.json | 2 +- testing/constraints-3.10.txt | 2 + testing/constraints-3.11.txt | 2 + testing/constraints-3.12.txt | 2 + testing/constraints-3.13.txt | 1 + .../testing => testing}/constraints-3.14.txt | 0 testing/constraints-3.8.txt | 2 + testing/constraints-3.9.txt | 2 + 317 files changed, 62 insertions(+), 134793 deletions(-) delete mode 100644 owl-bot-staging/spanner/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner/v1/.flake8 delete mode 100644 owl-bot-staging/spanner/v1/LICENSE delete mode 100644 owl-bot-staging/spanner/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner/v1/README.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/spanner/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/spanner/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py delete mode 100644 
owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py delete mode 100644 owl-bot-staging/spanner/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py delete mode 100644 
owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py delete mode 100644 owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py delete mode 100644 owl-bot-staging/spanner/v1/setup.py delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.14.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/spanner/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner_admin_database/v1/.flake8 delete mode 100644 owl-bot-staging/spanner_admin_database/v1/LICENSE delete mode 100644 owl-bot-staging/spanner_admin_database/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner_admin_database/v1/README.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/index.rst delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/mypy.ini delete mode 
100644 owl-bot-staging/spanner_admin_database/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/setup.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/.flake8 delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/LICENSE delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/README.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/setup.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt delete mode 
100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py rename {owl-bot-staging/spanner_admin_database/v1/testing => testing}/constraints-3.14.txt (100%) diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py index 689f6afe96..16a075d983 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -81,9 +81,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py index 8f31a1fb98..3faaae0ca4 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -169,9 +169,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -305,9 +306,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py index 5171d84d40..dec153adc3 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -166,8 +166,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -218,9 +219,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py index df70fc5636..dfec442041 100644 --- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py +++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py @@ -1622,9 +1622,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py index 5a737b69f7..d8c055d60e 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -75,9 +75,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py index ee5b765210..09f5c4ed85 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -178,9 +178,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -314,9 +315,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
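Every hunk in this group makes the same documentation-only change: ``credentials_file`` is marked deprecated and slated for removal in the next major version, and callers are pointed at passing a ``credentials`` object instead. A minimal migration sketch, assuming a service-account JSON key whose path ("key.json") is illustrative rather than taken from this patch:

.. code-block:: python

    # Load the credentials explicitly rather than handing the client a
    # credentials_file path; this is the replacement the notices point to.
    import google.auth

    from google.cloud.spanner_v1.services.spanner import SpannerClient

    # google.auth.load_credentials_from_file returns (credentials, project_id).
    credentials, _ = google.auth.load_credentials_from_file("key.json")

    # The credentials object takes the place of the deprecated argument.
    client = SpannerClient(credentials=credentials)

The same substitution applies to the ``DatabaseAdminClient`` and ``InstanceAdminClient`` surfaces touched in the surrounding hunks; all three accept the same ``credentials`` argument.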
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py index f2df40d1f2..dc98e7c0d8 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -175,8 +175,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -227,9 +228,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py index ca32cafa99..feef4e8048 100644 --- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py +++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py @@ -1355,9 +1355,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py index d1dfe07291..3e68439cd7 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/base.py +++ b/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -76,9 +76,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py index 8b377d7725..20eb2373fd 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -160,9 +160,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -303,9 +304,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py index 2c6cec52a9..0bf612b242 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -156,8 +156,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -209,9 +210,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. 
This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/google/cloud/spanner_v1/services/spanner/transports/rest.py b/google/cloud/spanner_v1/services/spanner/transports/rest.py index 7b49a0d76a..721e9929b3 100644 --- a/google/cloud/spanner_v1/services/spanner/transports/rest.py +++ b/google/cloud/spanner_v1/services/spanner/transports/rest.py @@ -933,9 +933,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/owl-bot-staging/spanner/v1/.coveragerc b/owl-bot-staging/spanner/v1/.coveragerc deleted file mode 100644 index 677a992799..0000000000 --- a/owl-bot-staging/spanner/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner/__init__.py - google/cloud/spanner/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/spanner/v1/.flake8 b/owl-bot-staging/spanner/v1/.flake8 deleted file mode 100644 index 90316de214..0000000000 --- a/owl-bot-staging/spanner/v1/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/spanner/v1/LICENSE b/owl-bot-staging/spanner/v1/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/owl-bot-staging/spanner/v1/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/owl-bot-staging/spanner/v1/MANIFEST.in b/owl-bot-staging/spanner/v1/MANIFEST.in
deleted file mode 100644
index dae249ec89..0000000000
--- a/owl-bot-staging/spanner/v1/MANIFEST.in
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-include README.rst LICENSE
-recursive-include google *.py *.pyi *.json *.proto py.typed
-recursive-include tests *
-global-exclude *.py[co]
-global-exclude __pycache__
diff --git a/owl-bot-staging/spanner/v1/README.rst b/owl-bot-staging/spanner/v1/README.rst
deleted file mode 100644
index 502ebce1c4..0000000000
--- a/owl-bot-staging/spanner/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Spanner API
-=================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Spanner API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#.
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/spanner/v1/docs/_static/custom.css b/owl-bot-staging/spanner/v1/docs/_static/custom.css deleted file mode 100644 index b0a295464b..0000000000 --- a/owl-bot-staging/spanner/v1/docs/_static/custom.css +++ /dev/null @@ -1,20 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} - -/* Ensure minimum width for 'Parameters' / 'Returns' column */ -dl.field-list > dt { - min-width: 100px -} - -/* Insert space between methods for readability */ -dl.method { - padding-top: 10px; - padding-bottom: 10px -} - -/* Insert empty space between classes */ -dl.class { - padding-bottom: 50px -} diff --git a/owl-bot-staging/spanner/v1/docs/_templates/layout.html b/owl-bot-staging/spanner/v1/docs/_templates/layout.html deleted file mode 100644 index 95e9c77fcf..0000000000 --- a/owl-bot-staging/spanner/v1/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
  <div class="document">
-    {{ sidebar() }}
-    {%- block document %}
-      <div class="documentwrapper">
-      {%- if render_sidebar %}
-        <div class="bodywrapper">
-      {%- endif %}
-
-        {%- block relbar_top %}
-          {%- if theme_show_relbar_top|tobool %}
-            <div class="related top">
-              &nbsp;
-              {{- rellink_markup () }}
-            </div>
-          {%- endif %}
-        {% endblock %}
-
-        <div class="body" role="main">
-          <div class="admonition" id="python2-eol">
-          As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
-          Library versions released prior to that date will continue to be available. For more information please
-          visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
-          </div>
-          {% block body %} {% endblock %}
-        </div>
-
-        {%- block relbar_bottom %}
-          {%- if theme_show_relbar_bottom|tobool %}
-            <div class="related bottom">
-              &nbsp;
-              {{- rellink_markup () }}
-            </div>
-          {%- endif %}
-        {% endblock %}
-
-      {%- if render_sidebar %}
-        </div>
-      {%- endif %}
-      </div>
-    {%- endblock %}
-    <div class="clearer"></div>
-  </div>
-
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/spanner/v1/docs/conf.py b/owl-bot-staging/spanner/v1/docs/conf.py deleted file mode 100644 index 010a6b6cda..0000000000 --- a/owl-bot-staging/spanner/v1/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-spanner documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-spanner" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. 
-# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-spanner", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-spanner-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-spanner.tex", - u"google-cloud-spanner Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-spanner", - "google-cloud-spanner Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-spanner", - "google-cloud-spanner Documentation", - author, - "google-cloud-spanner", - "google-cloud-spanner Library", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("https://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), - "grpc": ("https://grpc.github.io/grpc/python/", None), - "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner/v1/docs/index.rst b/owl-bot-staging/spanner/v1/docs/index.rst deleted file mode 100644 index 1162c85acc..0000000000 --- a/owl-bot-staging/spanner/v1/docs/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. include:: multiprocessing.rst - - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - spanner_v1/services_ - spanner_v1/types_ diff --git a/owl-bot-staging/spanner/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner/v1/docs/multiprocessing.rst deleted file mode 100644 index 536d17b2ea..0000000000 --- a/owl-bot-staging/spanner/v1/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpc` library, it is safe to - share instances across threads. In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.pool.Pool` or - :class:`multiprocessing.Process`. 
diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst deleted file mode 100644 index 3bbbb55f79..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner v1 API -======================================== -.. toctree:: - :maxdepth: 2 - - spanner diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst deleted file mode 100644 index b51f4447e4..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst +++ /dev/null @@ -1,10 +0,0 @@ -Spanner -------------------------- - -.. automodule:: google.cloud.spanner_v1.services.spanner - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_v1.services.spanner.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst deleted file mode 100644 index c7ff7e6c71..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Spanner v1 API -===================================== - -.. automodule:: google.cloud.spanner_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py deleted file mode 100644 index 4757e26d69..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.spanner_v1.services.spanner.client import SpannerClient -from google.cloud.spanner_v1.services.spanner.async_client import SpannerAsyncClient - -from google.cloud.spanner_v1.types.change_stream import ChangeStreamRecord -from google.cloud.spanner_v1.types.commit_response import CommitResponse -from google.cloud.spanner_v1.types.keys import KeyRange -from google.cloud.spanner_v1.types.keys import KeySet -from google.cloud.spanner_v1.types.mutation import Mutation -from google.cloud.spanner_v1.types.query_plan import PlanNode -from google.cloud.spanner_v1.types.query_plan import QueryPlan -from google.cloud.spanner_v1.types.result_set import PartialResultSet -from google.cloud.spanner_v1.types.result_set import ResultSet -from google.cloud.spanner_v1.types.result_set import ResultSetMetadata -from google.cloud.spanner_v1.types.result_set import ResultSetStats -from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsRequest -from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsResponse -from google.cloud.spanner_v1.types.spanner import BatchWriteRequest -from google.cloud.spanner_v1.types.spanner import BatchWriteResponse -from google.cloud.spanner_v1.types.spanner import BeginTransactionRequest -from google.cloud.spanner_v1.types.spanner import CommitRequest -from google.cloud.spanner_v1.types.spanner import CreateSessionRequest -from google.cloud.spanner_v1.types.spanner import DeleteSessionRequest -from google.cloud.spanner_v1.types.spanner import DirectedReadOptions -from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlRequest -from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlResponse -from google.cloud.spanner_v1.types.spanner import ExecuteSqlRequest -from google.cloud.spanner_v1.types.spanner import GetSessionRequest -from google.cloud.spanner_v1.types.spanner import ListSessionsRequest -from google.cloud.spanner_v1.types.spanner import ListSessionsResponse -from google.cloud.spanner_v1.types.spanner import Partition -from google.cloud.spanner_v1.types.spanner import PartitionOptions -from google.cloud.spanner_v1.types.spanner import PartitionQueryRequest -from google.cloud.spanner_v1.types.spanner import PartitionReadRequest -from google.cloud.spanner_v1.types.spanner import PartitionResponse -from google.cloud.spanner_v1.types.spanner import ReadRequest -from google.cloud.spanner_v1.types.spanner import RequestOptions -from google.cloud.spanner_v1.types.spanner import RollbackRequest -from google.cloud.spanner_v1.types.spanner import Session -from google.cloud.spanner_v1.types.transaction import MultiplexedSessionPrecommitToken -from google.cloud.spanner_v1.types.transaction import Transaction -from google.cloud.spanner_v1.types.transaction import TransactionOptions -from google.cloud.spanner_v1.types.transaction import TransactionSelector -from google.cloud.spanner_v1.types.type import StructType -from google.cloud.spanner_v1.types.type import Type -from google.cloud.spanner_v1.types.type import TypeAnnotationCode -from google.cloud.spanner_v1.types.type import TypeCode - -__all__ = ('SpannerClient', - 'SpannerAsyncClient', - 'ChangeStreamRecord', - 'CommitResponse', - 'KeyRange', - 'KeySet', - 'Mutation', - 'PlanNode', - 'QueryPlan', - 'PartialResultSet', - 'ResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 
'BatchWriteRequest', - 'BatchWriteResponse', - 'BeginTransactionRequest', - 'CommitRequest', - 'CreateSessionRequest', - 'DeleteSessionRequest', - 'DirectedReadOptions', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'ExecuteSqlRequest', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'Partition', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'PartitionResponse', - 'ReadRequest', - 'RequestOptions', - 'RollbackRequest', - 'Session', - 'MultiplexedSessionPrecommitToken', - 'Transaction', - 'TransactionOptions', - 'TransactionSelector', - 'StructType', - 'Type', - 'TypeAnnotationCode', - 'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed deleted file mode 100644 index 0989eccd04..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py deleted file mode 100644 index 6275c8acfb..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py +++ /dev/null @@ -1,114 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
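The ``google.cloud.spanner`` package above is a thin wrapper: every client and type it exports is re-exported from ``google.cloud.spanner_v1``, whose ``__init__.py`` follows. A quick sketch of that equivalence, using names present in both ``__all__`` lists:

.. code-block:: python

    # Both import paths resolve to the same class objects; the wrapper
    # package only re-exports names from the versioned package.
    from google.cloud import spanner
    from google.cloud import spanner_v1

    assert spanner.SpannerClient is spanner_v1.SpannerClient
    assert spanner.TypeCode is spanner_v1.TypeCode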
-# -from google.cloud.spanner_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.spanner import SpannerClient -from .services.spanner import SpannerAsyncClient - -from .types.change_stream import ChangeStreamRecord -from .types.commit_response import CommitResponse -from .types.keys import KeyRange -from .types.keys import KeySet -from .types.mutation import Mutation -from .types.query_plan import PlanNode -from .types.query_plan import QueryPlan -from .types.result_set import PartialResultSet -from .types.result_set import ResultSet -from .types.result_set import ResultSetMetadata -from .types.result_set import ResultSetStats -from .types.spanner import BatchCreateSessionsRequest -from .types.spanner import BatchCreateSessionsResponse -from .types.spanner import BatchWriteRequest -from .types.spanner import BatchWriteResponse -from .types.spanner import BeginTransactionRequest -from .types.spanner import CommitRequest -from .types.spanner import CreateSessionRequest -from .types.spanner import DeleteSessionRequest -from .types.spanner import DirectedReadOptions -from .types.spanner import ExecuteBatchDmlRequest -from .types.spanner import ExecuteBatchDmlResponse -from .types.spanner import ExecuteSqlRequest -from .types.spanner import GetSessionRequest -from .types.spanner import ListSessionsRequest -from .types.spanner import ListSessionsResponse -from .types.spanner import Partition -from .types.spanner import PartitionOptions -from .types.spanner import PartitionQueryRequest -from .types.spanner import PartitionReadRequest -from .types.spanner import PartitionResponse -from .types.spanner import ReadRequest -from .types.spanner import RequestOptions -from .types.spanner import RollbackRequest -from .types.spanner import Session -from .types.transaction import MultiplexedSessionPrecommitToken -from .types.transaction import Transaction -from .types.transaction import TransactionOptions -from .types.transaction import TransactionSelector -from .types.type import StructType -from .types.type import Type -from .types.type import TypeAnnotationCode -from .types.type import TypeCode - -__all__ = ( - 'SpannerAsyncClient', -'BatchCreateSessionsRequest', -'BatchCreateSessionsResponse', -'BatchWriteRequest', -'BatchWriteResponse', -'BeginTransactionRequest', -'ChangeStreamRecord', -'CommitRequest', -'CommitResponse', -'CreateSessionRequest', -'DeleteSessionRequest', -'DirectedReadOptions', -'ExecuteBatchDmlRequest', -'ExecuteBatchDmlResponse', -'ExecuteSqlRequest', -'GetSessionRequest', -'KeyRange', -'KeySet', -'ListSessionsRequest', -'ListSessionsResponse', -'MultiplexedSessionPrecommitToken', -'Mutation', -'PartialResultSet', -'Partition', -'PartitionOptions', -'PartitionQueryRequest', -'PartitionReadRequest', -'PartitionResponse', -'PlanNode', -'QueryPlan', -'ReadRequest', -'RequestOptions', -'ResultSet', -'ResultSetMetadata', -'ResultSetStats', -'RollbackRequest', -'Session', -'SpannerClient', -'StructType', -'Transaction', -'TransactionOptions', -'TransactionSelector', -'Type', -'TypeAnnotationCode', -'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json deleted file mode 100644 index f5957c633a..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json +++ /dev/null @@ -1,268 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - 
"language": "python", - "libraryPackage": "google.cloud.spanner_v1", - "protoPackage": "google.spanner.v1", - "schema": "1.0", - "services": { - "Spanner": { - "clients": { - "grpc": { - "libraryClient": "SpannerClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - }, - "grpc-async": { - "libraryClient": "SpannerAsyncClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - }, - "rest": { - "libraryClient": "SpannerClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py deleted 
file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed deleted file mode 100644 index 0989eccd04..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py deleted file mode 100644 index a7de7a7b93..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import SpannerClient -from .async_client import SpannerAsyncClient - -__all__ = ( - 'SpannerClient', - 'SpannerAsyncClient', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py deleted file mode 100644 index e3fe8cabac..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py +++ /dev/null @@ -1,2125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union - -from google.cloud.spanner_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport -from .client import SpannerClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class SpannerAsyncClient: - """Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - """ - - _client: SpannerClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
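- # The template form (for example, "spanner.{UNIVERSE_DOMAIN}") lets the
- # client substitute a non-default universe domain into the endpoint,
- # which the fixed DEFAULT_ENDPOINT string cannot express.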
- DEFAULT_ENDPOINT = SpannerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = SpannerClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = SpannerClient._DEFAULT_UNIVERSE - - database_path = staticmethod(SpannerClient.database_path) - parse_database_path = staticmethod(SpannerClient.parse_database_path) - session_path = staticmethod(SpannerClient.session_path) - parse_session_path = staticmethod(SpannerClient.parse_session_path) - common_billing_account_path = staticmethod(SpannerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(SpannerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(SpannerClient.common_folder_path) - parse_common_folder_path = staticmethod(SpannerClient.parse_common_folder_path) - common_organization_path = staticmethod(SpannerClient.common_organization_path) - parse_common_organization_path = staticmethod(SpannerClient.parse_common_organization_path) - common_project_path = staticmethod(SpannerClient.common_project_path) - parse_common_project_path = staticmethod(SpannerClient.parse_common_project_path) - common_location_path = staticmethod(SpannerClient.common_location_path) - parse_common_location_path = staticmethod(SpannerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerAsyncClient: The constructed client. - """ - return SpannerClient.from_service_account_info.__func__(SpannerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerAsyncClient: The constructed client. - """ - return SpannerClient.from_service_account_file.__func__(SpannerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. 
- - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return SpannerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> SpannerTransport: - """Returns the transport used by the client instance. - - Returns: - SpannerTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = SpannerClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the spanner async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,SpannerTransport,Callable[..., SpannerTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the SpannerTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. 
- - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = SpannerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner_v1.SpannerAsyncClient`.", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.spanner.v1.Spanner", - "credentialsType": None, - } - ) - - async def create_session(self, - request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_create_session(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.CreateSessionRequest( - database="database_value", - ) - - # Make the request - response = await client.create_session(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]]): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - database (:class:`str`): - Required. 
The database in which the - new session is created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CreateSessionRequest): - request = spanner.CreateSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def batch_create_sessions(self, - request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - session_count: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.BatchCreateSessionsResponse: - r"""Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_batch_create_sessions(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.BatchCreateSessionsRequest( - database="database_value", - session_count=1420, - ) - - # Make the request - response = await client.batch_create_sessions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]]): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - database (:class:`str`): - Required. The database in which the - new sessions are created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - session_count (:class:`int`): - Required. The number of sessions to be created in this - batch call. The API can return fewer than the requested - number of sessions. If a specific number of sessions are - desired, the client can make additional calls to - ``BatchCreateSessions`` (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - - This corresponds to the ``session_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, session_count] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchCreateSessionsRequest): - request = spanner.BatchCreateSessionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if session_count is not None: - request.session_count = session_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_create_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. 
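- # (routing_header.to_grpc_metadata renders these pairs as the
- # "x-goog-request-params" header, which the backend uses to route the
- # request.)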
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_session(self, - request: Optional[Union[spanner.GetSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_get_session(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.GetSessionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_session(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]]): - The request object. The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - name (:class:`str`): - Required. The name of the session to - retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.GetSessionRequest): - request = spanner.GetSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_sessions(self, - request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSessionsAsyncPager: - r"""Lists all sessions in a given database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_list_sessions(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ListSessionsRequest( - database="database_value", - ) - - # Make the request - page_result = client.list_sessions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]]): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - database (:class:`str`): - Required. The database in which to - list sessions. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
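- # (proto-plus constructors accept a dict or another message of the same
- # type, so either request shape coerces to ListSessionsRequest.)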
- if not isinstance(request, spanner.ListSessionsRequest): - request = spanner.ListSessionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSessionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_session(self, - request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Ends a session, releasing server resources associated - with it. This asynchronously triggers the cancellation - of any operations that are running with this session. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_delete_session(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.DeleteSessionRequest( - name="name_value", - ) - - # Make the request - await client.delete_session(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]]): - The request object. The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - name (:class:`str`): - Required. The name of the session to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
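- # (the check below is `is not None`, so falsy-but-set values such as ""
- # still count as explicitly provided flattened parameters.)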
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner.DeleteSessionRequest):
-            request = spanner.DeleteSessionRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_session]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    async def execute_sql(self,
-            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> result_set.ResultSet:
-        r"""Executes an SQL statement, returning all results in a single
-        reply. This method can't be used to return a result set larger
-        than 10 MiB; if the query yields more data than that, the query
-        fails with a ``FAILED_PRECONDITION`` error.
-
-        Operations inside read-write transactions might return
-        ``ABORTED``. If this occurs, the application should restart the
-        transaction from the beginning. See
-        [Transaction][google.spanner.v1.Transaction] for more details.
-
-        Larger result sets can be fetched in streaming fashion by
-        calling
-        [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]
-        instead.
-
-        The query string can be SQL or Graph Query Language (GQL).
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_v1
-
-            async def sample_execute_sql():
-                # Create a client
-                client = spanner_v1.SpannerAsyncClient()
-
-                # Initialize request argument(s)
-                request = spanner_v1.ExecuteSqlRequest(
-                    session="session_value",
-                    sql="sql_value",
-                )
-
-                # Make the request
-                response = await client.execute_sql(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]):
-                The request object. The request for
-                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
-                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
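Because statements inside read-write transactions can surface ``ABORTED``, callers are expected to restart the whole transaction rather than retry the single statement. A rough sketch of such a restart loop (simplified; a real implementation would also replay the transaction's earlier statements and honor the server's retry-delay hint):

.. code-block:: python

    import asyncio

    from google.api_core import exceptions as core_exceptions
    from google.cloud import spanner_v1


    async def run_with_abort_restarts(
        client: spanner_v1.SpannerAsyncClient,
        request: spanner_v1.ExecuteSqlRequest,
        max_attempts: int = 5,
    ) -> spanner_v1.ResultSet:
        for attempt in range(max_attempts):
            try:
                return await client.execute_sql(request=request)
            except core_exceptions.Aborted:
                if attempt == max_attempts - 1:
                    raise
                # Back off briefly before restarting the transaction.
                await asyncio.sleep(0.1 * (2 ** attempt))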
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_v1.types.ResultSet:
-                Results from [Read][google.spanner.v1.Spanner.Read] or
-                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner.ExecuteSqlRequest):
-            request = spanner.ExecuteSqlRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.execute_sql]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("session", request.session),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def execute_streaming_sql(self,
-            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]:
-        r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except
-        returns the result set as a stream. Unlike
-        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no
-        limit on the size of the returned result set. However, no
-        individual row in the result set can exceed 100 MiB, and no
-        column value can exceed 10 MiB.
-
-        The query string can be SQL or Graph Query Language (GQL).
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_v1
-
-            async def sample_execute_streaming_sql():
-                # Create a client
-                client = spanner_v1.SpannerAsyncClient()
-
-                # Initialize request argument(s)
-                request = spanner_v1.ExecuteSqlRequest(
-                    session="session_value",
-                    sql="sql_value",
-                )
-
-                # Make the request
-                stream = await client.execute_streaming_sql(request=request)
-
-                # Handle the response
-                async for response in stream:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]):
-                The request object. The request for
-                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
-                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.execute_streaming_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def execute_batch_dml(self, - request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_execute_batch_dml(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - statements = spanner_v1.Statement() - statements.sql = "sql_value" - - request = spanner_v1.ExecuteBatchDmlRequest( - session="session_value", - statements=statements, - seqno=550, - ) - - # Make the request - response = await client.execute_batch_dml(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]]): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
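Anticipating the ``Returns`` documentation below, the status-plus-result-count rule can be applied mechanically to locate the failed statement. A sketch (status code ``0`` is ``google.rpc.Code.OK``):

.. code-block:: python

    from typing import Optional

    from google.cloud import spanner_v1


    def first_failed_statement(
        response: spanner_v1.ExecuteBatchDmlResponse,
    ) -> Optional[int]:
        """Return the 0-based index of the failed statement, or None if all ran."""
        if response.status.code == 0:  # OK: every statement executed.
            return None
        # With N result sets returned, statement N (0-based) failed and the
        # statements after it were never executed.
        return len(response.result_sets)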
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_v1.types.ExecuteBatchDmlResponse:
-                The response for
-                [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml].
-                Contains a list of [ResultSet][google.spanner.v1.ResultSet]
-                messages, one for each DML statement that has successfully
-                executed, in the same order as the statements in the
-                request. If a statement fails, the status in the response
-                body identifies the cause of the failure.
-
-                To check for DML statements that failed, use the
-                following approach:
-
-                1. Check the status in the response message. The
-                   [google.rpc.Code][google.rpc.Code] enum value OK
-                   indicates that all statements were executed successfully.
-                2. If the status was not OK, check the number of result sets
-                   in the response. If the response contains N
-                   [ResultSet][google.spanner.v1.ResultSet] messages, then
-                   statement N+1 in the request failed.
-
-                Example 1:
-
-                - Request: 5 DML statements, all executed successfully.
-                - Response: 5 [ResultSet][google.spanner.v1.ResultSet]
-                  messages, with the status OK.
-
-                Example 2:
-
-                - Request: 5 DML statements. The third statement has a
-                  syntax error.
-                - Response: 2 [ResultSet][google.spanner.v1.ResultSet]
-                  messages, and a syntax error (INVALID_ARGUMENT) status.
-                  The number of [ResultSet][google.spanner.v1.ResultSet]
-                  messages indicates that the third statement failed, and
-                  the fourth and fifth statements were not executed.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner.ExecuteBatchDmlRequest):
-            request = spanner.ExecuteBatchDmlRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.execute_batch_dml]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("session", request.session),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def read(self,
-            request: Optional[Union[spanner.ReadRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> result_set.ResultSet:
-        r"""Reads rows from the database using key lookups and scans, as a
-        simple key/value style alternative to
-        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method
-        can't be used to return a result set larger than 10 MiB; if the
-        read matches more data than that, the read fails with a
-        ``FAILED_PRECONDITION`` error.
-
-        Reads inside read-write transactions might return ``ABORTED``.
- If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - response = await client.read(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def streaming_read(self, - request: Optional[Union[spanner.ReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: - r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = await client.streaming_read(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.streaming_read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def begin_transaction(self, - request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, - *, - session: Optional[str] = None, - options: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transaction.Transaction: - r"""Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_begin_transaction(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.BeginTransactionRequest( - session="session_value", - ) - - # Make the request - response = await client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]]): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - session (:class:`str`): - Required. The session in which the - transaction runs. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - options (:class:`google.cloud.spanner_v1.types.TransactionOptions`): - Required. Options for the new - transaction. - - This corresponds to the ``options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Transaction: - A transaction. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, options] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BeginTransactionRequest): - request = spanner.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if options is not None: - request.options = options - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.begin_transaction] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
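Putting ``BeginTransaction`` and ``Commit`` together at this raw-API level might look like the sketch below (the session name is assumed to exist; the handwritten ``google-cloud-spanner`` helpers normally manage this flow for you):

.. code-block:: python

    from google.cloud import spanner_v1


    async def explicit_transaction(
        client: spanner_v1.SpannerAsyncClient, session: str
    ):
        # Begin a read-write transaction explicitly...
        txn = await client.begin_transaction(
            session=session,
            options=spanner_v1.TransactionOptions(
                read_write=spanner_v1.TransactionOptions.ReadWrite(),
            ),
        )
        # ...then commit it by ID. An empty mutation list is valid; real
        # callers would pass the mutations to apply atomically.
        response = await client.commit(
            session=session,
            transaction_id=txn.id,
            mutations=[],
        )
        return response.commit_timestamp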
- return response - - async def commit(self, - request: Optional[Union[spanner.CommitRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - mutations: Optional[MutableSequence[mutation.Mutation]] = None, - single_use_transaction: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> commit_response.CommitResponse: - r"""Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_commit(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.CommitRequest( - transaction_id=b'transaction_id_blob', - session="session_value", - ) - - # Make the request - response = await client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.CommitRequest, dict]]): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - session (:class:`str`): - Required. The session in which the - transaction to be committed is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (:class:`bytes`): - Commit a previously-started - transaction. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (:class:`MutableSequence[google.cloud.spanner_v1.types.Mutation]`): - The mutations to be executed when - this transaction commits. All mutations - are applied atomically, in the order - they appear in this list. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - single_use_transaction (:class:`google.cloud.spanner_v1.types.TransactionOptions`): - Execute mutations in a temporary transaction. Note that - unlike commit of a previously-started transaction, - commit with a temporary transaction is non-idempotent. 
- That is, if the ``CommitRequest`` is sent to Cloud - Spanner more than once (for instance, due to retries in - the application, or in the transport library), it's - possible that the mutations are executed more than once. - If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - - This corresponds to the ``single_use_transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id, mutations, single_use_transaction] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CommitRequest): - request = spanner.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - if single_use_transaction is not None: - request.single_use_transaction = single_use_transaction - if mutations: - request.mutations.extend(mutations) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rollback(self, - request: Optional[Union[spanner.RollbackRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. 
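A sketch of the clean-up pattern this recommends: if a read-write transaction performs reads but will not commit, roll it back so its locks are released promptly (identifiers are illustrative):

.. code-block:: python

    from google.cloud import spanner_v1


    async def read_then_abandon(
        client: spanner_v1.SpannerAsyncClient, session: str
    ) -> None:
        txn = await client.begin_transaction(
            session=session,
            options=spanner_v1.TransactionOptions(
                read_write=spanner_v1.TransactionOptions.ReadWrite(),
            ),
        )
        try:
            await client.execute_sql(request=spanner_v1.ExecuteSqlRequest(
                session=session,
                sql="SELECT 1",
                transaction=spanner_v1.TransactionSelector(id=txn.id),
            ))
        finally:
            # Safe even if the transaction was already aborted or is no
            # longer found; Rollback never returns ABORTED.
            await client.rollback(session=session, transaction_id=txn.id)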
- - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_rollback(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - await client.rollback(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.RollbackRequest, dict]]): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. - session (:class:`str`): - Required. The session in which the - transaction to roll back is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (:class:`bytes`): - Required. The transaction to roll - back. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.RollbackRequest): - request = spanner.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def partition_query(self, - request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - query operation in parallel. Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_partition_query(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionQueryRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = await client.partition_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]]): - The request object. The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.PartitionQueryRequest): - request = spanner.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.partition_query] - - # Certain fields should be provided within the metadata header; - # add these here. 
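The partition-token contract described above, end to end, might look like this sketch: create the tokens under a read-only transaction, then fan the same transaction and tokens out to ``ExecuteStreamingSql`` (in practice each partition would go to a separate worker):

.. code-block:: python

    from google.cloud import spanner_v1


    async def partitioned_query(
        client: spanner_v1.SpannerAsyncClient, session: str, sql: str
    ) -> None:
        # The tokens must be consumed with the same session and read-only
        # transaction that created them.
        txn = await client.begin_transaction(
            session=session,
            options=spanner_v1.TransactionOptions(
                read_only=spanner_v1.TransactionOptions.ReadOnly(strong=True),
            ),
        )
        selector = spanner_v1.TransactionSelector(id=txn.id)
        partitions = await client.partition_query(
            request=spanner_v1.PartitionQueryRequest(
                session=session, sql=sql, transaction=selector,
            )
        )
        for partition in partitions.partitions:
            stream = await client.execute_streaming_sql(
                request=spanner_v1.ExecuteSqlRequest(
                    session=session, sql=sql, transaction=selector,
                    partition_token=partition.partition_token,
                )
            )
            async for chunk in stream:
                ...  # each worker would process its own partition's chunks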
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def partition_read(self, - request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_partition_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionReadRequest( - session="session_value", - table="table_value", - ) - - # Make the request - response = await client.partition_read(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]]): - The request object. The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, spanner.PartitionReadRequest): - request = spanner.PartitionReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.partition_read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_write(self, - request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, - *, - session: Optional[str] = None, - mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[spanner.BatchWriteResponse]]: - r"""Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_batch_write(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - mutation_groups = spanner_v1.MutationGroup() - mutation_groups.mutations.insert.table = "table_value" - - request = spanner_v1.BatchWriteRequest( - session="session_value", - mutation_groups=mutation_groups, - ) - - # Make the request - stream = await client.batch_write(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]]): - The request object. The request for - [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. - session (:class:`str`): - Required. The session in which the - batch request is to be run. 
- - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutation_groups (:class:`MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]`): - Required. The groups of mutations to - be applied. - - This corresponds to the ``mutation_groups`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.BatchWriteResponse]: - The result of applying a batch of - mutations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, mutation_groups] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchWriteRequest): - request = spanner.BatchWriteRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if mutation_groups: - request.mutation_groups.extend(mutation_groups) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
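Consuming the ``BatchWrite`` stream then reduces to checking each returned status against the mutation-group indexes it covers. A sketch (only idempotent groups should be blindly retried, since ``BatchWrite`` is not replay protected):

.. code-block:: python

    from typing import List

    from google.cloud import spanner_v1


    async def apply_mutation_groups(
        client: spanner_v1.SpannerAsyncClient,
        session: str,
        groups: List[spanner_v1.BatchWriteRequest.MutationGroup],
    ) -> None:
        stream = await client.batch_write(session=session, mutation_groups=groups)
        async for response in stream:
            # `indexes` identifies which mutation groups this status covers.
            if response.status.code != 0:  # not OK
                print("groups", list(response.indexes),
                      "failed:", response.status.message)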
- return response - - async def __aenter__(self) -> "SpannerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "SpannerAsyncClient", -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py deleted file mode 100644 index bd4ac8451f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py +++ /dev/null @@ -1,2486 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.spanner_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import SpannerGrpcTransport -from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport -from .transports.rest import SpannerRestTransport - - -class SpannerClientMeta(type): - 
"""Metaclass for the Spanner client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] - _transport_registry["grpc"] = SpannerGrpcTransport - _transport_registry["grpc_asyncio"] = SpannerGrpcAsyncIOTransport - _transport_registry["rest"] = SpannerRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[SpannerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class SpannerClient(metaclass=SpannerClientMeta): - """Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SpannerTransport: - """Returns the transport used by the client instance. - - Returns: - SpannerTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def database_path(project: str,instance: str,database: str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def session_path(project: str,instance: str,database: str,session: str,) -> str: - """Returns a fully-qualified session string.""" - return "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) - - @staticmethod - def parse_session_path(path: str) -> Dict[str,str]: - """Parses a session path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)/sessions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a 
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
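-    # A minimal sketch of how the environment variables above drive endpoint
-    # selection (values are illustrative; the method is deprecated and emits a
-    # DeprecationWarning when called):
-    #
-    #     import os
-    #
-    #     os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"  # force the regular endpoint
-    #     endpoint, cert_source = SpannerClient.get_mtls_endpoint_and_cert_source()
-    #     # endpoint == SpannerClient.DEFAULT_ENDPOINT, and cert_source is None
-    #     # unless GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a default
-    #     # client certificate is available.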
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = SpannerClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = SpannerClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = SpannerClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. 
-
-        Returns:
-            bool: True iff the configured universe domain is valid.
-
-        Raises:
-            ValueError: If the configured universe domain is not valid.
-        """
-
-        # NOTE (b/349488459): universe validation is disabled until further notice.
-        return True
-
-    def _add_cred_info_for_auth_errors(
-        self,
-        error: core_exceptions.GoogleAPICallError
-    ) -> None:
-        """Adds credential info string to error details for 401/403/404 errors.
-
-        Args:
-            error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
-        """
-        if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]:
-            return
-
-        cred = self._transport._credentials
-
-        # get_cred_info is only available in google-auth>=2.35.0
-        if not hasattr(cred, "get_cred_info"):
-            return
-
-        # ignore the type check since pypy test fails when get_cred_info
-        # is not available
-        cred_info = cred.get_cred_info()  # type: ignore
-        if cred_info and hasattr(error._details, "append"):
-            error._details.append(json.dumps(cred_info))
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used by the client instance.
-        """
-        return self._universe_domain
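-    # A minimal construction sketch tying the pieces above together (the
-    # endpoint value is a placeholder and Application Default Credentials are
-    # assumed; see the __init__ docstring below for the full option set):
-    #
-    #     from google.api_core.client_options import ClientOptions
-    #
-    #     options = ClientOptions(api_endpoint="spanner.googleapis.com")
-    #     client = SpannerClient(client_options=options)
-    #     print(client.api_endpoint, client.universe_domain)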
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the Spanner client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the SpannerTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                   default endpoint provided by the client when ``transport`` is
-                   not explicitly provided. Only if this property is not set and
-                   ``transport`` was not explicitly provided, the endpoint is
-                   determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                   variable, which can have one of the following values:
-                   "always" (always use the default mTLS endpoint), "never" (always
-                   use the default regular endpoint) and "auto" (auto-switch to the
-                   default mTLS endpoint if a client certificate is present; this is
-                   the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                   is "true", then the ``client_cert_source`` property can be used
-                   to provide a client certificate for mTLS transport. If
-                   not provided, the default SSL client certificate will be used if
-                   present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                   set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                   default "googleapis.com" universe. Note that the ``api_endpoint``
-                   property still takes precedence; and ``universe_domain`` is
-                   currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = SpannerClient._read_environment_variables()
-        self._client_cert_source = SpannerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = SpannerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None  # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, SpannerTransport)
-        if transport_provided:
-            # transport is a SpannerTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(SpannerTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - SpannerClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[SpannerTransport], Callable[..., SpannerTransport]] = ( - SpannerClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., SpannerTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner_v1.SpannerClient`.", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.spanner.v1.Spanner", - "credentialsType": None, - } - ) - - def create_session(self, - request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_create_session(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.CreateSessionRequest( - database="database_value", - ) - - # Make the request - response = client.create_session(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - database (str): - Required. The database in which the - new session is created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CreateSessionRequest): - request = spanner.CreateSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_create_sessions(self, - request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - session_count: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.BatchCreateSessionsResponse: - r"""Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_batch_create_sessions(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.BatchCreateSessionsRequest( - database="database_value", - session_count=1420, - ) - - # Make the request - response = client.batch_create_sessions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - database (str): - Required. The database in which the - new sessions are created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - session_count (int): - Required. The number of sessions to be created in this - batch call. The API can return fewer than the requested - number of sessions. If a specific number of sessions are - desired, the client can make additional calls to - ``BatchCreateSessions`` (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - - This corresponds to the ``session_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, session_count] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchCreateSessionsRequest): - request = spanner.BatchCreateSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if session_count is not None: - request.session_count = session_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.batch_create_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_session(self, - request: Optional[Union[spanner.GetSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_get_session(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.GetSessionRequest( - name="name_value", - ) - - # Make the request - response = client.get_session(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): - The request object. The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - name (str): - Required. The name of the session to - retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.GetSessionRequest): - request = spanner.GetSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_sessions(self, - request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSessionsPager: - r"""Lists all sessions in a given database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_list_sessions(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ListSessionsRequest( - database="database_value", - ) - - # Make the request - page_result = client.list_sessions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - database (str): - Required. The database in which to - list sessions. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, spanner.ListSessionsRequest): - request = spanner.ListSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSessionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_session(self, - request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Ends a session, releasing server resources associated - with it. This asynchronously triggers the cancellation - of any operations that are running with this session. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_delete_session(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.DeleteSessionRequest( - name="name_value", - ) - - # Make the request - client.delete_session(request=request) - - Args: - request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): - The request object. The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - name (str): - Required. The name of the session to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner.DeleteSessionRequest):
-            request = spanner.DeleteSessionRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_session]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def execute_sql(self,
-            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> result_set.ResultSet:
-        r"""Executes an SQL statement, returning all results in a single
-        reply. This method can't be used to return a result set larger
-        than 10 MiB; if the query yields more data than that, the query
-        fails with a ``FAILED_PRECONDITION`` error.
-
-        Operations inside read-write transactions might return
-        ``ABORTED``. If this occurs, the application should restart the
-        transaction from the beginning. See
-        [Transaction][google.spanner.v1.Transaction] for more details.
-
-        Larger result sets can be fetched in streaming fashion by
-        calling
-        [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]
-        instead.
-
-        The query string can be SQL or `Graph Query Language
-        (GQL) <https://cloud.google.com/spanner/docs/reference/standard-sql/graph-intro>`__.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_v1
-
-            def sample_execute_sql():
-                # Create a client
-                client = spanner_v1.SpannerClient()
-
-                # Initialize request argument(s)
-                request = spanner_v1.ExecuteSqlRequest(
-                    session="session_value",
-                    sql="sql_value",
-                )
-
-                # Make the request
-                response = client.execute_sql(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]):
-                The request object. The request for
-                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
-                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_v1.types.ResultSet:
-                Results from [Read][google.spanner.v1.Spanner.Read] or
-                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner.ExecuteSqlRequest):
-            request = spanner.ExecuteSqlRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.execute_sql]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("session", request.session),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
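-    # A short parameterized-query sketch complementing the generated sample in
-    # the docstring above (`session_name`, the SQL text, and the parameter
-    # value are placeholders; `params` takes a mapping and `param_types`
-    # declares the type of each parameter):
-    #
-    #     request = spanner_v1.ExecuteSqlRequest(
-    #         session=session_name,
-    #         sql="SELECT SingerId FROM Singers WHERE LastName = @last_name",
-    #         params={"last_name": "Garcia"},
-    #         param_types={"last_name": spanner_v1.Type(code=spanner_v1.TypeCode.STRING)},
-    #     )
-    #     results = client.execute_sql(request=request)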
-    def execute_streaming_sql(self,
-            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> Iterable[result_set.PartialResultSet]:
-        r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except
-        returns the result set as a stream. Unlike
-        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no
-        limit on the size of the returned result set. However, no
-        individual row in the result set can exceed 100 MiB, and no
-        column value can exceed 10 MiB.
-
-        The query string can be SQL or `Graph Query Language
-        (GQL) <https://cloud.google.com/spanner/docs/reference/standard-sql/graph-intro>`__.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_v1
-
-            def sample_execute_streaming_sql():
-                # Create a client
-                client = spanner_v1.SpannerClient()
-
-                # Initialize request argument(s)
-                request = spanner_v1.ExecuteSqlRequest(
-                    session="session_value",
-                    sql="sql_value",
-                )
-
-                # Make the request
-                stream = client.execute_streaming_sql(request=request)
-
-                # Handle the response
-                for response in stream:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]):
-                The request object. The request for
-                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
-                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
- - Returns: - Iterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.execute_streaming_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def execute_batch_dml(self, - request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_execute_batch_dml(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - statements = spanner_v1.Statement() - statements.sql = "sql_value" - - request = spanner_v1.ExecuteBatchDmlRequest( - session="session_value", - statements=statements, - seqno=550, - ) - - # Make the request - response = client.execute_batch_dml(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
-                Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_v1.types.ExecuteBatchDmlResponse:
-                The response for
-                [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml].
-                Contains a list of [ResultSet][google.spanner.v1.ResultSet]
-                messages, one for each DML statement that has successfully
-                executed, in the same order as the statements in the
-                request. If a statement fails, the status in the response
-                body identifies the cause of the failure.
-
-                To check for DML statements that failed, use the
-                following approach:
-
-                1. Check the status in the response message. The
-                   [google.rpc.Code][google.rpc.Code] enum value OK
-                   indicates that all statements were executed
-                   successfully.
-                2. If the status was not OK, check the number of result
-                   sets in the response. If the response contains N
-                   [ResultSet][google.spanner.v1.ResultSet] messages,
-                   then statement N+1 in the request failed.
-
-                Example 1:
-
-                -  Request: 5 DML statements, all executed successfully.
-                -  Response: 5 [ResultSet][google.spanner.v1.ResultSet]
-                   messages, with the status OK.
-
-                Example 2:
-
-                -  Request: 5 DML statements. The third statement has a
-                   syntax error.
-                -  Response: 2 [ResultSet][google.spanner.v1.ResultSet]
-                   messages, and a syntax error (INVALID_ARGUMENT)
-                   status. The number of
-                   [ResultSet][google.spanner.v1.ResultSet] messages
-                   indicates that the third statement failed, and the
-                   fourth and fifth statements were not executed.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner.ExecuteBatchDmlRequest):
-            request = spanner.ExecuteBatchDmlRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.execute_batch_dml]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("session", request.session),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
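-    # A small sketch of the status-checking approach described in the Returns
-    # section above (`request` is assumed to be a populated
-    # ExecuteBatchDmlRequest; `code_pb2` comes from the standard google.rpc
-    # protos shipped with googleapis-common-protos):
-    #
-    #     from google.rpc import code_pb2
-    #
-    #     response = client.execute_batch_dml(request=request)
-    #     if response.status.code != code_pb2.OK:
-    #         failed_index = len(response.result_sets)  # 0-based index of the statement that failed
-    #         print(f"Statement {failed_index} failed: {response.status.message}")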
-    def read(self,
-            request: Optional[Union[spanner.ReadRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> result_set.ResultSet:
-        r"""Reads rows from the database using key lookups and scans, as a
-        simple key/value style alternative to
-        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method
-        can't be used to return a result set larger than 10 MiB; if the
-        read matches more data than that, the read fails with a
-        ``FAILED_PRECONDITION`` error.
-
-        Reads inside read-write transactions might return ``ABORTED``.
-        If this occurs, the application should restart the transaction
-        from the beginning. See
-        [Transaction][google.spanner.v1.Transaction] for more details.
-
-        Larger result sets can be yielded in streaming fashion by
-        calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead]
-        instead.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_v1
-
-            def sample_read():
-                # Create a client
-                client = spanner_v1.SpannerClient()
-
-                # Initialize request argument(s)
-                request = spanner_v1.ReadRequest(
-                    session="session_value",
-                    table="table_value",
-                    columns=['columns_value1', 'columns_value2'],
-                )
-
-                # Make the request
-                response = client.read(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]):
-                The request object. The request for [Read][google.spanner.v1.Spanner.Read]
-                and
-                [StreamingRead][google.spanner.v1.Spanner.StreamingRead].
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_v1.types.ResultSet:
-                Results from [Read][google.spanner.v1.Spanner.Read] or
-                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner.ReadRequest):
-            request = spanner.ReadRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.read]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("session", request.session),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def streaming_read(self,
-            request: Optional[Union[spanner.ReadRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> Iterable[result_set.PartialResultSet]:
-        r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the
-        result set as a stream. Unlike
-        [Read][google.spanner.v1.Spanner.Read], there is no limit on the
-        size of the returned result set. However, no individual row in
-        the result set can exceed 100 MiB, and no column value can
-        exceed 10 MiB.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = client.streaming_read(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.streaming_read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def begin_transaction(self, - request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, - *, - session: Optional[str] = None, - options: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transaction.Transaction: - r"""Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_begin_transaction(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.BeginTransactionRequest( - session="session_value", - ) - - # Make the request - response = client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - session (str): - Required. The session in which the - transaction runs. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - options (google.cloud.spanner_v1.types.TransactionOptions): - Required. Options for the new - transaction. - - This corresponds to the ``options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Transaction: - A transaction. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, options] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BeginTransactionRequest): - request = spanner.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if options is not None: - request.options = options - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.begin_transaction] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def commit(self, - request: Optional[Union[spanner.CommitRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - mutations: Optional[MutableSequence[mutation.Mutation]] = None, - single_use_transaction: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> commit_response.CommitResponse: - r"""Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_commit(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.CommitRequest( - transaction_id=b'transaction_id_blob', - session="session_value", - ) - - # Make the request - response = client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - session (str): - Required. The session in which the - transaction to be committed is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (bytes): - Commit a previously-started - transaction. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): - The mutations to be executed when - this transaction commits. All mutations - are applied atomically, in the order - they appear in this list. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): - Execute mutations in a temporary transaction. Note that - unlike commit of a previously-started transaction, - commit with a temporary transaction is non-idempotent. - That is, if the ``CommitRequest`` is sent to Cloud - Spanner more than once (for instance, due to retries in - the application, or in the transport library), it's - possible that the mutations are executed more than once. 
- If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - - This corresponds to the ``single_use_transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id, mutations, single_use_transaction] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CommitRequest): - request = spanner.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - if mutations is not None: - request.mutations = mutations - if single_use_transaction is not None: - request.single_use_transaction = single_use_transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rollback(self, - request: Optional[Union[spanner.RollbackRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_rollback(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - client.rollback(request=request) - - Args: - request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. - session (str): - Required. The session in which the - transaction to roll back is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (bytes): - Required. The transaction to roll - back. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.RollbackRequest): - request = spanner.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def partition_query(self, - request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - query operation in parallel. 
Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_partition_query(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionQueryRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = client.partition_query(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): - The request object. The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.PartitionQueryRequest): - request = spanner.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.partition_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
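To make the partition-token flow concrete, here is a hedged sketch that begins a read-only transaction, partitions a query, and drains each partition with ``ExecuteStreamingSql`` (the session path is hypothetical):

.. code-block:: python

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()
    session = "projects/p/instances/i/databases/d/sessions/s"  # hypothetical

    # All partitions must share one read-only transaction.
    txn = client.begin_transaction(
        session=session,
        options=spanner_v1.TransactionOptions(read_only={}),
    )
    selector = spanner_v1.TransactionSelector(id=txn.id)

    response = client.partition_query(request=spanner_v1.PartitionQueryRequest(
        session=session,
        sql="SELECT 1",
        transaction=selector,
    ))
    for partition in response.partitions:
        # Each token selects one disjoint slice of the same query's results.
        stream = client.execute_streaming_sql(request=spanner_v1.ExecuteSqlRequest(
            session=session,
            sql="SELECT 1",
            transaction=selector,
            partition_token=partition.partition_token,
        ))
        for result in stream:
            print(result)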
- return response - - def partition_read(self, - request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_partition_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionReadRequest( - session="session_value", - table="table_value", - ) - - # Make the request - response = client.partition_read(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): - The request object. The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.PartitionReadRequest): - request = spanner.PartitionReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.partition_read] - - # Certain fields should be provided within the metadata header; - # add these here. 
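The routing-header block repeated in each method serializes the resource name into the ``x-goog-request-params`` header so the backend can route the call; a small sketch of what ``to_grpc_metadata`` produces (the printed value is illustrative):

.. code-block:: python

    from google.api_core import gapic_v1

    md = gapic_v1.routing_header.to_grpc_metadata(
        (("session", "projects/p/instances/i/databases/d/sessions/s"),)
    )
    # A single ('x-goog-request-params', 'session=...') tuple with the
    # resource name percent-encoded, appended to the call metadata.
    print(md)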
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_write(self, - request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, - *, - session: Optional[str] = None, - mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[spanner.BatchWriteResponse]: - r"""Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_batch_write(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - mutation_groups = spanner_v1.MutationGroup() - mutation_groups.mutations.insert.table = "table_value" - - request = spanner_v1.BatchWriteRequest( - session="session_value", - mutation_groups=mutation_groups, - ) - - # Make the request - stream = client.batch_write(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]): - The request object. The request for - [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. - session (str): - Required. The session in which the - batch request is to be run. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): - Required. The groups of mutations to - be applied. - - This corresponds to the ``mutation_groups`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]: - The result of applying a batch of - mutations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, mutation_groups] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchWriteRequest): - request = spanner.BatchWriteRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if mutation_groups is not None: - request.mutation_groups = mutation_groups - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "SpannerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "SpannerClient", -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py deleted file mode 100644 index 0969af8f6f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
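Given the ``__exit__`` warning above, the usual pattern is one client per ``with`` block so the transport is closed exactly once; a minimal sketch (the database path is hypothetical):

.. code-block:: python

    from google.cloud import spanner_v1

    # Exiting the block closes the underlying transport, so the client
    # must not share its transport with other clients.
    with spanner_v1.SpannerClient() as client:
        session = client.create_session(
            database="projects/p/instances/i/databases/d",
        )
        print(session.name)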
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_v1.types import spanner - - -class ListSessionsPager: - """A pager for iterating through ``list_sessions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner.ListSessionsResponse], - request: spanner.ListSessionsRequest, - response: spanner.ListSessionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.spanner_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = spanner.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner.Session]: - for page in self.pages: - yield from page.sessions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSessionsAsyncPager: - """A pager for iterating through ``list_sessions`` requests. 
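In practice the sync pager above is consumed item by item, with additional pages fetched transparently; a minimal sketch, assuming a hypothetical database path:

.. code-block:: python

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()
    pager = client.list_sessions(
        database="projects/p/instances/i/databases/d",
    )

    # Item-level iteration; further ListSessions requests are issued
    # automatically whenever a next_page_token is present.
    for session in pager:
        print(session.name)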
- - This class thinly wraps an initial - :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner.ListSessionsResponse]], - request: spanner.ListSessionsRequest, - response: spanner.ListSessionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.spanner_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = spanner.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner.Session]: - async def async_generator(): - async for page in self.pages: - for response in page.sessions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst deleted file mode 100644 index 99997401d5..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`SpannerTransport` is the ABC for all transports. -- public child `SpannerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `SpannerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseSpannerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
-- public child `SpannerRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py deleted file mode 100644 index ee0b1ae37f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import SpannerTransport -from .grpc import SpannerGrpcTransport -from .grpc_asyncio import SpannerGrpcAsyncIOTransport -from .rest import SpannerRestTransport -from .rest import SpannerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] -_transport_registry['grpc'] = SpannerGrpcTransport -_transport_registry['grpc_asyncio'] = SpannerGrpcAsyncIOTransport -_transport_registry['rest'] = SpannerRestTransport - -__all__ = ( - 'SpannerTransport', - 'SpannerGrpcTransport', - 'SpannerGrpcAsyncIOTransport', - 'SpannerRestTransport', - 'SpannerRestInterceptor', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py deleted file mode 100644 index 0c7b14364b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py +++ /dev/null @@ -1,505 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
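This registry is what allows a transport to be selected by name at client construction time; a minimal sketch:

.. code-block:: python

    from google.cloud import spanner_v1

    # Each name maps to one of the registered transport classes.
    grpc_client = spanner_v1.SpannerClient(transport="grpc")
    rest_client = spanner_v1.SpannerClient(transport="rest")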
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.spanner_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class SpannerTransport(abc.ABC): - """Abstract transport class for Spanner.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
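Since ``credentials_file`` is deprecated and mutually exclusive with ``credentials``, the forward-compatible approach is to load credentials explicitly and pass the object; a sketch assuming a hypothetical service-account key path:

.. code-block:: python

    from google.oauth2 import service_account
    from google.cloud import spanner_v1

    creds = service_account.Credentials.from_service_account_file(
        "/path/to/key.json",  # hypothetical path
        scopes=[
            "https://www.googleapis.com/auth/cloud-platform",
            "https://www.googleapis.com/auth/spanner.data",
        ],
    )
    client = spanner_v1.SpannerClient(credentials=creds)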
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_session: gapic_v1.method.wrap_method( - self.create_session, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_create_sessions: gapic_v1.method.wrap_method( - self.batch_create_sessions, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_session: gapic_v1.method.wrap_method( - self.get_session, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_sessions: gapic_v1.method.wrap_method( - self.list_sessions, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_session: gapic_v1.method.wrap_method( - self.delete_session, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_sql: gapic_v1.method.wrap_method( - self.execute_sql, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_streaming_sql: gapic_v1.method.wrap_method( - 
self.execute_streaming_sql, - default_timeout=3600.0, - client_info=client_info, - ), - self.execute_batch_dml: gapic_v1.method.wrap_method( - self.execute_batch_dml, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.read: gapic_v1.method.wrap_method( - self.read, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.streaming_read: gapic_v1.method.wrap_method( - self.streaming_read, - default_timeout=3600.0, - client_info=client_info, - ), - self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.commit: gapic_v1.method.wrap_method( - self.commit, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.rollback: gapic_v1.method.wrap_method( - self.rollback, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_query: gapic_v1.method.wrap_method( - self.partition_query, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_read: gapic_v1.method.wrap_method( - self.partition_read, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_write: gapic_v1.method.wrap_method( - self.batch_write, - default_timeout=3600.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
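The wrapped-method table above only supplies per-RPC defaults, and any individual call can override them; a hedged sketch using the same ``google.api_core`` types (the session name is hypothetical):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()
    custom_retry = retries.Retry(
        initial=0.25,
        maximum=32.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.ServiceUnavailable,
        ),
        deadline=60.0,
    )
    # Per-call retry/timeout take precedence over the wrapped defaults.
    session = client.get_session(
        name="projects/p/instances/i/databases/d/sessions/s",
        retry=custom_retry,
        timeout=60.0,
    )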
- """ - raise NotImplementedError() - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - Union[ - spanner.Session, - Awaitable[spanner.Session] - ]]: - raise NotImplementedError() - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - Union[ - spanner.BatchCreateSessionsResponse, - Awaitable[spanner.BatchCreateSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - Union[ - spanner.Session, - Awaitable[spanner.Session] - ]]: - raise NotImplementedError() - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - Union[ - spanner.ListSessionsResponse, - Awaitable[spanner.ListSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Union[ - result_set.ResultSet, - Awaitable[result_set.ResultSet] - ]]: - raise NotImplementedError() - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Union[ - result_set.PartialResultSet, - Awaitable[result_set.PartialResultSet] - ]]: - raise NotImplementedError() - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - Union[ - spanner.ExecuteBatchDmlResponse, - Awaitable[spanner.ExecuteBatchDmlResponse] - ]]: - raise NotImplementedError() - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - Union[ - result_set.ResultSet, - Awaitable[result_set.ResultSet] - ]]: - raise NotImplementedError() - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - Union[ - result_set.PartialResultSet, - Awaitable[result_set.PartialResultSet] - ]]: - raise NotImplementedError() - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - Union[ - transaction.Transaction, - Awaitable[transaction.Transaction] - ]]: - raise NotImplementedError() - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - Union[ - commit_response.CommitResponse, - Awaitable[commit_response.CommitResponse] - ]]: - raise NotImplementedError() - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - Union[ - spanner.PartitionResponse, - Awaitable[spanner.PartitionResponse] - ]]: - raise NotImplementedError() - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - Union[ - spanner.PartitionResponse, - Awaitable[spanner.PartitionResponse] - ]]: - raise NotImplementedError() - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - Union[ - spanner.BatchWriteResponse, - Awaitable[spanner.BatchWriteResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'SpannerTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py deleted file mode 100644 index 064199f57e..0000000000 --- 
a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py
+++ /dev/null
@@ -1,899 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.spanner_v1.types import commit_response
-from google.cloud.spanner_v1.types import result_set
-from google.cloud.spanner_v1.types import spanner
-from google.cloud.spanner_v1.types import transaction
-from google.protobuf import empty_pb2  # type: ignore
-from .base import SpannerTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.spanner.v1.Spanner",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a dict of str key/value pairs.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.spanner.v1.Spanner",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class SpannerGrpcTransport(SpannerTransport):
-    """gRPC backend transport for Spanner.
-
-    Cloud Spanner API
-
-    The Cloud Spanner API can be used to manage sessions and execute
-    transactions on data stored in Cloud Spanner databases.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'spanner.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'spanner.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): Deprecated. A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-                This argument will be removed in the next major version of this library.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel.
It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - spanner.Session]: - r"""Return a callable for the create session method over gRPC. - - Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - Returns: - Callable[[~.CreateSessionRequest], - ~.Session]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
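For orientation, a minimal sketch of exercising the callable above once a transport exists; ``transport`` and the database path below are assumptions for illustration, not names defined in this module:

    session = transport.create_session(
        spanner.CreateSessionRequest(
            database="projects/p/instances/i/databases/d",  # hypothetical path
        )
    )
    # Keep an otherwise idle session alive with a trivial query.
    transport.execute_sql(
        spanner.ExecuteSqlRequest(session=session.name, sql="SELECT 1")
    )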
- if 'create_session' not in self._stubs: - self._stubs['create_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/CreateSession', - request_serializer=spanner.CreateSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['create_session'] - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - spanner.BatchCreateSessionsResponse]: - r"""Return a callable for the batch create sessions method over gRPC. - - Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Returns: - Callable[[~.BatchCreateSessionsRequest], - ~.BatchCreateSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_sessions' not in self._stubs: - self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BatchCreateSessions', - request_serializer=spanner.BatchCreateSessionsRequest.serialize, - response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, - ) - return self._stubs['batch_create_sessions'] - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - spanner.Session]: - r"""Return a callable for the get session method over gRPC. - - Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - Returns: - Callable[[~.GetSessionRequest], - ~.Session]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_session' not in self._stubs: - self._stubs['get_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/GetSession', - request_serializer=spanner.GetSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['get_session'] - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - spanner.ListSessionsResponse]: - r"""Return a callable for the list sessions method over gRPC. - - Lists all sessions in a given database. - - Returns: - Callable[[~.ListSessionsRequest], - ~.ListSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ListSessions', - request_serializer=spanner.ListSessionsRequest.serialize, - response_deserializer=spanner.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete session method over gRPC. - - Ends a session, releasing server resources associated - with it. 
This asynchronously triggers the cancellation - of any operations that are running with this session. - - Returns: - Callable[[~.DeleteSessionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_session' not in self._stubs: - self._stubs['delete_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/DeleteSession', - request_serializer=spanner.DeleteSessionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_session'] - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.ResultSet]: - r"""Return a callable for the execute sql method over gRPC. - - Executes an SQL statement, returning all results in a single - reply. This method can't be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - The query string can be SQL or Graph Query Language - (GQL). - - Returns: - Callable[[~.ExecuteSqlRequest], - ~.ResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_sql' not in self._stubs: - self._stubs['execute_sql'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['execute_sql'] - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.PartialResultSet]: - r"""Return a callable for the execute streaming sql method over gRPC. - - Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - The query string can be SQL or Graph Query Language - (GQL). - - Returns: - Callable[[~.ExecuteSqlRequest], - ~.PartialResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
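A rough sketch of consuming the stream described above; ``transport``, ``session``, and the table name are assumed for illustration:

    stream = transport.execute_streaming_sql(
        spanner.ExecuteSqlRequest(
            session=session.name,
            sql="SELECT SingerId, FirstName FROM Singers",  # hypothetical table
        )
    )
    for partial in stream:
        # Each item is a result_set.PartialResultSet; a single value may be
        # split across consecutive chunks when chunked_value is set.
        print(partial.values)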
- if 'execute_streaming_sql' not in self._stubs: - self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/ExecuteStreamingSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['execute_streaming_sql'] - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - spanner.ExecuteBatchDmlResponse]: - r"""Return a callable for the execute batch dml method over gRPC. - - Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Returns: - Callable[[~.ExecuteBatchDmlRequest], - ~.ExecuteBatchDmlResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_batch_dml' not in self._stubs: - self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteBatchDml', - request_serializer=spanner.ExecuteBatchDmlRequest.serialize, - response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, - ) - return self._stubs['execute_batch_dml'] - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - result_set.ResultSet]: - r"""Return a callable for the read method over gRPC. - - Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - can't be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Returns: - Callable[[~.ReadRequest], - ~.ResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'read' not in self._stubs: - self._stubs['read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Read', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['read'] - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - result_set.PartialResultSet]: - r"""Return a callable for the streaming read method over gRPC. 
- - Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Returns: - Callable[[~.ReadRequest], - ~.PartialResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'streaming_read' not in self._stubs: - self._stubs['streaming_read'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/StreamingRead', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['streaming_read'] - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - transaction.Transaction]: - r"""Return a callable for the begin transaction method over gRPC. - - Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Returns: - Callable[[~.BeginTransactionRequest], - ~.Transaction]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BeginTransaction', - request_serializer=spanner.BeginTransactionRequest.serialize, - response_deserializer=transaction.Transaction.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - commit_response.CommitResponse]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Returns: - Callable[[~.CommitRequest], - ~.CommitResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
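One way to follow the restart-from-the-beginning guidance in the ``commit`` docstring above; a sketch assuming ``transport``, ``session``, and ``mutations`` already exist in scope:

    from google.api_core import exceptions as core_exceptions

    while True:
        txn = transport.begin_transaction(
            spanner.BeginTransactionRequest(
                session=session.name,
                options=transaction.TransactionOptions(
                    read_write=transaction.TransactionOptions.ReadWrite(),
                ),
            )
        )
        try:
            transport.commit(
                spanner.CommitRequest(
                    session=session.name,
                    transaction_id=txn.id,
                    mutations=mutations,
                )
            )
            break  # Committed successfully.
        except core_exceptions.Aborted:
            continue  # Retry the whole transaction in the same session.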
- if 'commit' not in self._stubs: - self._stubs['commit'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Commit', - request_serializer=spanner.CommitRequest.serialize, - response_deserializer=commit_response.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - empty_pb2.Empty]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - Returns: - Callable[[~.RollbackRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Rollback', - request_serializer=spanner.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - spanner.PartitionResponse]: - r"""Return a callable for the partition query method over gRPC. - - Creates a set of partition tokens that can be used to execute a - query operation in parallel. Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionQueryRequest], - ~.PartitionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionQuery', - request_serializer=spanner.PartitionQueryRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - spanner.PartitionResponse]: - r"""Return a callable for the partition read method over gRPC. - - Creates a set of partition tokens that can be used to execute a - read operation in parallel. 
Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionReadRequest], - ~.PartitionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_read' not in self._stubs: - self._stubs['partition_read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionRead', - request_serializer=spanner.PartitionReadRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_read'] - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - spanner.BatchWriteResponse]: - r"""Return a callable for the batch write method over gRPC. - - Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - Returns: - Callable[[~.BatchWriteRequest], - ~.BatchWriteResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
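The streamed, per-group statuses described above can be consumed as in this sketch; ``transport``, ``session``, and ``my_mutations`` are assumed for illustration:

    groups = [
        spanner.BatchWriteRequest.MutationGroup(mutations=[m])
        for m in my_mutations  # hypothetical list of mutation protos
    ]
    for response in transport.batch_write(
        spanner.BatchWriteRequest(session=session.name, mutation_groups=groups)
    ):
        # response.indexes identifies the mutation groups this status covers.
        if response.status.code != 0:
            print("groups", list(response.indexes), "failed:", response.status.message)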
- if 'batch_write' not in self._stubs: - self._stubs['batch_write'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/BatchWrite', - request_serializer=spanner.BatchWriteRequest.serialize, - response_deserializer=spanner.BatchWriteResponse.deserialize, - ) - return self._stubs['batch_write'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'SpannerGrpcTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py deleted file mode 100644 index 3efcf7071f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ /dev/null @@ -1,1125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore -from .base import SpannerTransport, DEFAULT_CLIENT_INFO -from .grpc import SpannerGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in 
request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra={ - "serviceName": "google.spanner.v1.Spanner", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC response metadata to a plain dict of strings - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra={ - "serviceName": "google.spanner.v1.Spanner", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class SpannerGrpcAsyncIOTransport(SpannerTransport): - """gRPC AsyncIO backend transport for Spanner. - - Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - Awaitable[spanner.Session]]: - r"""Return a callable for the create session method over gRPC. - - Creates a new session.
A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - Returns: - Callable[[~.CreateSessionRequest], - Awaitable[~.Session]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_session' not in self._stubs: - self._stubs['create_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/CreateSession', - request_serializer=spanner.CreateSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['create_session'] - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - Awaitable[spanner.BatchCreateSessionsResponse]]: - r"""Return a callable for the batch create sessions method over gRPC. - - Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Returns: - Callable[[~.BatchCreateSessionsRequest], - Awaitable[~.BatchCreateSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_sessions' not in self._stubs: - self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BatchCreateSessions', - request_serializer=spanner.BatchCreateSessionsRequest.serialize, - response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, - ) - return self._stubs['batch_create_sessions'] - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - Awaitable[spanner.Session]]: - r"""Return a callable for the get session method over gRPC. - - Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - Returns: - Callable[[~.GetSessionRequest], - Awaitable[~.Session]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
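The session-cache note above suggests pre-creating sessions in bulk; a sketch on the async transport, where ``transport`` and the database path are assumed for illustration:

    resp = await transport.batch_create_sessions(
        spanner.BatchCreateSessionsRequest(
            database="projects/p/instances/i/databases/d",  # hypothetical path
            session_count=25,
        )
    )
    session_cache = list(resp.session)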
- if 'get_session' not in self._stubs: - self._stubs['get_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/GetSession', - request_serializer=spanner.GetSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['get_session'] - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - Awaitable[spanner.ListSessionsResponse]]: - r"""Return a callable for the list sessions method over gRPC. - - Lists all sessions in a given database. - - Returns: - Callable[[~.ListSessionsRequest], - Awaitable[~.ListSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ListSessions', - request_serializer=spanner.ListSessionsRequest.serialize, - response_deserializer=spanner.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete session method over gRPC. - - Ends a session, releasing server resources associated - with it. This asynchronously triggers the cancellation - of any operations that are running with this session. - - Returns: - Callable[[~.DeleteSessionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_session' not in self._stubs: - self._stubs['delete_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/DeleteSession', - request_serializer=spanner.DeleteSessionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_session'] - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Awaitable[result_set.ResultSet]]: - r"""Return a callable for the execute sql method over gRPC. - - Executes an SQL statement, returning all results in a single - reply. This method can't be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - The query string can be SQL or Graph Query Language - (GQL). - - Returns: - Callable[[~.ExecuteSqlRequest], - Awaitable[~.ResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
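Unlike the synchronous transport, these callables return awaitables, so each call must be awaited inside a running event loop; a sketch assuming an async ``transport`` and an existing ``session``:

    result = await transport.execute_sql(
        spanner.ExecuteSqlRequest(session=session.name, sql="SELECT 1")
    )
    # result is a fully materialized result_set.ResultSet (at most 10 MiB).
    print(result.rows)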
- if 'execute_sql' not in self._stubs: - self._stubs['execute_sql'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['execute_sql'] - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Awaitable[result_set.PartialResultSet]]: - r"""Return a callable for the execute streaming sql method over gRPC. - - Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - The query string can be SQL or Graph Query Language - (GQL). - - Returns: - Callable[[~.ExecuteSqlRequest], - Awaitable[~.PartialResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_streaming_sql' not in self._stubs: - self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/ExecuteStreamingSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['execute_streaming_sql'] - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - Awaitable[spanner.ExecuteBatchDmlResponse]]: - r"""Return a callable for the execute batch dml method over gRPC. - - Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Returns: - Callable[[~.ExecuteBatchDmlRequest], - Awaitable[~.ExecuteBatchDmlResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_batch_dml' not in self._stubs: - self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteBatchDml', - request_serializer=spanner.ExecuteBatchDmlRequest.serialize, - response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, - ) - return self._stubs['execute_batch_dml'] - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - Awaitable[result_set.ResultSet]]: - r"""Return a callable for the read method over gRPC. - - Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
This method - can't be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Returns: - Callable[[~.ReadRequest], - Awaitable[~.ResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'read' not in self._stubs: - self._stubs['read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Read', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['read'] - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - Awaitable[result_set.PartialResultSet]]: - r"""Return a callable for the streaming read method over gRPC. - - Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Returns: - Callable[[~.ReadRequest], - Awaitable[~.PartialResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'streaming_read' not in self._stubs: - self._stubs['streaming_read'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/StreamingRead', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['streaming_read'] - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - Awaitable[transaction.Transaction]]: - r"""Return a callable for the begin transaction method over gRPC. - - Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Returns: - Callable[[~.BeginTransactionRequest], - Awaitable[~.Transaction]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BeginTransaction', - request_serializer=spanner.BeginTransactionRequest.serialize, - response_deserializer=transaction.Transaction.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - Awaitable[commit_response.CommitResponse]]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Returns: - Callable[[~.CommitRequest], - Awaitable[~.CommitResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'commit' not in self._stubs: - self._stubs['commit'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Commit', - request_serializer=spanner.CommitRequest.serialize, - response_deserializer=commit_response.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - Returns: - Callable[[~.RollbackRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Rollback', - request_serializer=spanner.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - Awaitable[spanner.PartitionResponse]]: - r"""Return a callable for the partition query method over gRPC. - - Creates a set of partition tokens that can be used to execute a - query operation in parallel. 
Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionQueryRequest], - Awaitable[~.PartitionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionQuery', - request_serializer=spanner.PartitionQueryRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - Awaitable[spanner.PartitionResponse]]: - r"""Return a callable for the partition read method over gRPC. - - Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionReadRequest], - Awaitable[~.PartitionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_read' not in self._stubs: - self._stubs['partition_read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionRead', - request_serializer=spanner.PartitionReadRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_read'] - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - Awaitable[spanner.BatchWriteResponse]]: - r"""Return a callable for the batch write method over gRPC. - - Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. 
However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - Returns: - Callable[[~.BatchWriteRequest], - Awaitable[~.BatchWriteResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_write' not in self._stubs: - self._stubs['batch_write'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/BatchWrite', - request_serializer=spanner.BatchWriteRequest.serialize, - response_deserializer=spanner.BatchWriteResponse.deserialize, - ) - return self._stubs['batch_write'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_session: self._wrap_method( - self.create_session, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_create_sessions: self._wrap_method( - self.batch_create_sessions, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_session: self._wrap_method( - self.get_session, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_sessions: self._wrap_method( - self.list_sessions, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_session: self._wrap_method( - self.delete_session, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_sql: self._wrap_method( - self.execute_sql, - default_retry=retries.AsyncRetry( - initial=0.25, 
- maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_streaming_sql: self._wrap_method( - self.execute_streaming_sql, - default_timeout=3600.0, - client_info=client_info, - ), - self.execute_batch_dml: self._wrap_method( - self.execute_batch_dml, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.read: self._wrap_method( - self.read, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.streaming_read: self._wrap_method( - self.streaming_read, - default_timeout=3600.0, - client_info=client_info, - ), - self.begin_transaction: self._wrap_method( - self.begin_transaction, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.commit: self._wrap_method( - self.commit, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.rollback: self._wrap_method( - self.rollback, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_query: self._wrap_method( - self.partition_query, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_read: self._wrap_method( - self.partition_read, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_write: self._wrap_method( - self.batch_write, - default_timeout=3600.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'SpannerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py 
b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py deleted file mode 100644 index 206ddb9999..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py +++ /dev/null @@ -1,2898 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseSpannerRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class SpannerRestInterceptor: - """Interceptor for Spanner. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the SpannerRestTransport. - - .. 
code-block:: python - class MyCustomSpannerInterceptor(SpannerRestInterceptor): - def pre_batch_create_sessions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_create_sessions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_batch_write(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_write(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_begin_transaction(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_begin_transaction(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_commit(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_commit(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_session(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_session(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_session(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_execute_batch_dml(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_execute_batch_dml(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_execute_sql(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_execute_sql(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_execute_streaming_sql(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_execute_streaming_sql(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_session(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_session(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_sessions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_sessions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_partition_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_partition_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_partition_read(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_partition_read(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_read(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_read(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rollback(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_streaming_read(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_streaming_read(self, 
response): - logging.log(f"Received response: {response}") - return response - - transport = SpannerRestTransport(interceptor=MyCustomSpannerInterceptor()) - client = SpannerClient(transport=transport) - - - """ - def pre_batch_create_sessions(self, request: spanner.BatchCreateSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for batch_create_sessions - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_batch_create_sessions(self, response: spanner.BatchCreateSessionsResponse) -> spanner.BatchCreateSessionsResponse: - """Post-rpc interceptor for batch_create_sessions - - DEPRECATED. Please use the `post_batch_create_sessions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_batch_create_sessions` interceptor runs - before the `post_batch_create_sessions_with_metadata` interceptor. - """ - return response - - def post_batch_create_sessions_with_metadata(self, response: spanner.BatchCreateSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_create_sessions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_batch_create_sessions_with_metadata` - interceptor in new development instead of the `post_batch_create_sessions` interceptor. - When both interceptors are used, this `post_batch_create_sessions_with_metadata` interceptor runs after the - `post_batch_create_sessions` interceptor. The (possibly modified) response returned by - `post_batch_create_sessions` will be passed to - `post_batch_create_sessions_with_metadata`. - """ - return response, metadata - - def pre_batch_write(self, request: spanner.BatchWriteRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchWriteRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for batch_write - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_batch_write(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for batch_write - - DEPRECATED. Please use the `post_batch_write_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_batch_write` interceptor runs - before the `post_batch_write_with_metadata` interceptor. - """ - return response - - def post_batch_write_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_write - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. 
- - We recommend only using this `post_batch_write_with_metadata` - interceptor in new development instead of the `post_batch_write` interceptor. - When both interceptors are used, this `post_batch_write_with_metadata` interceptor runs after the - `post_batch_write` interceptor. The (possibly modified) response returned by - `post_batch_write` will be passed to - `post_batch_write_with_metadata`. - """ - return response, metadata - - def pre_begin_transaction(self, request: spanner.BeginTransactionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for begin_transaction - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_begin_transaction(self, response: transaction.Transaction) -> transaction.Transaction: - """Post-rpc interceptor for begin_transaction - - DEPRECATED. Please use the `post_begin_transaction_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_begin_transaction` interceptor runs - before the `post_begin_transaction_with_metadata` interceptor. - """ - return response - - def post_begin_transaction_with_metadata(self, response: transaction.Transaction, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transaction.Transaction, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for begin_transaction - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_begin_transaction_with_metadata` - interceptor in new development instead of the `post_begin_transaction` interceptor. - When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the - `post_begin_transaction` interceptor. The (possibly modified) response returned by - `post_begin_transaction` will be passed to - `post_begin_transaction_with_metadata`. - """ - return response, metadata - - def pre_commit(self, request: spanner.CommitRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for commit - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_commit(self, response: commit_response.CommitResponse) -> commit_response.CommitResponse: - """Post-rpc interceptor for commit - - DEPRECATED. Please use the `post_commit_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_commit` interceptor runs - before the `post_commit_with_metadata` interceptor. - """ - return response - - def post_commit_with_metadata(self, response: commit_response.CommitResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[commit_response.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for commit - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. 
- - We recommend only using this `post_commit_with_metadata` - interceptor in new development instead of the `post_commit` interceptor. - When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the - `post_commit` interceptor. The (possibly modified) response returned by - `post_commit` will be passed to - `post_commit_with_metadata`. - """ - return response, metadata - - def pre_create_session(self, request: spanner.CreateSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CreateSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_session - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_create_session(self, response: spanner.Session) -> spanner.Session: - """Post-rpc interceptor for create_session - - DEPRECATED. Please use the `post_create_session_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_create_session` interceptor runs - before the `post_create_session_with_metadata` interceptor. - """ - return response - - def post_create_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_session - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_create_session_with_metadata` - interceptor in new development instead of the `post_create_session` interceptor. - When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the - `post_create_session` interceptor. The (possibly modified) response returned by - `post_create_session` will be passed to - `post_create_session_with_metadata`. - """ - return response, metadata - - def pre_delete_session(self, request: spanner.DeleteSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.DeleteSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_session - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def pre_execute_batch_dml(self, request: spanner.ExecuteBatchDmlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for execute_batch_dml - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_execute_batch_dml(self, response: spanner.ExecuteBatchDmlResponse) -> spanner.ExecuteBatchDmlResponse: - """Post-rpc interceptor for execute_batch_dml - - DEPRECATED. Please use the `post_execute_batch_dml_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_execute_batch_dml` interceptor runs - before the `post_execute_batch_dml_with_metadata` interceptor. 
- """ - return response - - def post_execute_batch_dml_with_metadata(self, response: spanner.ExecuteBatchDmlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for execute_batch_dml - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_execute_batch_dml_with_metadata` - interceptor in new development instead of the `post_execute_batch_dml` interceptor. - When both interceptors are used, this `post_execute_batch_dml_with_metadata` interceptor runs after the - `post_execute_batch_dml` interceptor. The (possibly modified) response returned by - `post_execute_batch_dml` will be passed to - `post_execute_batch_dml_with_metadata`. - """ - return response, metadata - - def pre_execute_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for execute_sql - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_execute_sql(self, response: result_set.ResultSet) -> result_set.ResultSet: - """Post-rpc interceptor for execute_sql - - DEPRECATED. Please use the `post_execute_sql_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_execute_sql` interceptor runs - before the `post_execute_sql_with_metadata` interceptor. - """ - return response - - def post_execute_sql_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for execute_sql - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_execute_sql_with_metadata` - interceptor in new development instead of the `post_execute_sql` interceptor. - When both interceptors are used, this `post_execute_sql_with_metadata` interceptor runs after the - `post_execute_sql` interceptor. The (possibly modified) response returned by - `post_execute_sql` will be passed to - `post_execute_sql_with_metadata`. - """ - return response, metadata - - def pre_execute_streaming_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for execute_streaming_sql - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_execute_streaming_sql(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for execute_streaming_sql - - DEPRECATED. Please use the `post_execute_streaming_sql_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. 
This `post_execute_streaming_sql` interceptor runs - before the `post_execute_streaming_sql_with_metadata` interceptor. - """ - return response - - def post_execute_streaming_sql_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for execute_streaming_sql - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_execute_streaming_sql_with_metadata` - interceptor in new development instead of the `post_execute_streaming_sql` interceptor. - When both interceptors are used, this `post_execute_streaming_sql_with_metadata` interceptor runs after the - `post_execute_streaming_sql` interceptor. The (possibly modified) response returned by - `post_execute_streaming_sql` will be passed to - `post_execute_streaming_sql_with_metadata`. - """ - return response, metadata - - def pre_get_session(self, request: spanner.GetSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.GetSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_session - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_get_session(self, response: spanner.Session) -> spanner.Session: - """Post-rpc interceptor for get_session - - DEPRECATED. Please use the `post_get_session_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_get_session` interceptor runs - before the `post_get_session_with_metadata` interceptor. - """ - return response - - def post_get_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_session - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_get_session_with_metadata` - interceptor in new development instead of the `post_get_session` interceptor. - When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the - `post_get_session` interceptor. The (possibly modified) response returned by - `post_get_session` will be passed to - `post_get_session_with_metadata`. - """ - return response, metadata - - def pre_list_sessions(self, request: spanner.ListSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_sessions - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_list_sessions(self, response: spanner.ListSessionsResponse) -> spanner.ListSessionsResponse: - """Post-rpc interceptor for list_sessions - - DEPRECATED. Please use the `post_list_sessions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. 
This `post_list_sessions` interceptor runs - before the `post_list_sessions_with_metadata` interceptor. - """ - return response - - def post_list_sessions_with_metadata(self, response: spanner.ListSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_sessions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_list_sessions_with_metadata` - interceptor in new development instead of the `post_list_sessions` interceptor. - When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the - `post_list_sessions` interceptor. The (possibly modified) response returned by - `post_list_sessions` will be passed to - `post_list_sessions_with_metadata`. - """ - return response, metadata - - def pre_partition_query(self, request: spanner.PartitionQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for partition_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_partition_query(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: - """Post-rpc interceptor for partition_query - - DEPRECATED. Please use the `post_partition_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_partition_query` interceptor runs - before the `post_partition_query_with_metadata` interceptor. - """ - return response - - def post_partition_query_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for partition_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_partition_query_with_metadata` - interceptor in new development instead of the `post_partition_query` interceptor. - When both interceptors are used, this `post_partition_query_with_metadata` interceptor runs after the - `post_partition_query` interceptor. The (possibly modified) response returned by - `post_partition_query` will be passed to - `post_partition_query_with_metadata`. - """ - return response, metadata - - def pre_partition_read(self, request: spanner.PartitionReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for partition_read - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_partition_read(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: - """Post-rpc interceptor for partition_read - - DEPRECATED. Please use the `post_partition_read_with_metadata` - interceptor instead. 
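The ``list_sessions`` response is paged; the generated client wraps it in a pager so callers can iterate sessions directly (database path is a placeholder):

.. code-block:: python

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()
    for session in client.list_sessions(
        database="projects/p/instances/i/databases/d",  # placeholder
    ):
        print(session.name)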
- - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_partition_read` interceptor runs - before the `post_partition_read_with_metadata` interceptor. - """ - return response - - def post_partition_read_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for partition_read - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_partition_read_with_metadata` - interceptor in new development instead of the `post_partition_read` interceptor. - When both interceptors are used, this `post_partition_read_with_metadata` interceptor runs after the - `post_partition_read` interceptor. The (possibly modified) response returned by - `post_partition_read` will be passed to - `post_partition_read_with_metadata`. - """ - return response, metadata - - def pre_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for read - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_read(self, response: result_set.ResultSet) -> result_set.ResultSet: - """Post-rpc interceptor for read - - DEPRECATED. Please use the `post_read_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_read` interceptor runs - before the `post_read_with_metadata` interceptor. - """ - return response - - def post_read_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for read - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_read_with_metadata` - interceptor in new development instead of the `post_read` interceptor. - When both interceptors are used, this `post_read_with_metadata` interceptor runs after the - `post_read` interceptor. The (possibly modified) response returned by - `post_read` will be passed to - `post_read_with_metadata`. - """ - return response, metadata - - def pre_rollback(self, request: spanner.RollbackRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rollback - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def pre_streaming_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for streaming_read - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. 
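``streaming_read`` is the RPC these ``pre_streaming_read``/``post_streaming_read`` hooks wrap; a sketch of the call they would observe, with placeholder identifiers:

.. code-block:: python

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()
    stream = client.streaming_read(
        request=spanner_v1.ReadRequest(
            session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
            table="Singers",                                          # placeholder
            columns=["SingerId", "FirstName"],
            key_set=spanner_v1.KeySet(all_=True),
        )
    )
    for partial_result_set in stream:
        ...  # rows arrive incrementally as PartialResultSet chunks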
- """ - return request, metadata - - def post_streaming_read(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for streaming_read - - DEPRECATED. Please use the `post_streaming_read_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_streaming_read` interceptor runs - before the `post_streaming_read_with_metadata` interceptor. - """ - return response - - def post_streaming_read_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for streaming_read - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_streaming_read_with_metadata` - interceptor in new development instead of the `post_streaming_read` interceptor. - When both interceptors are used, this `post_streaming_read_with_metadata` interceptor runs after the - `post_streaming_read` interceptor. The (possibly modified) response returned by - `post_streaming_read` will be passed to - `post_streaming_read_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class SpannerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: SpannerRestInterceptor - - -class SpannerRestTransport(_BaseSpannerRestTransport): - """REST backend synchronous transport for Spanner. - - Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[SpannerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or SpannerRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _BatchCreateSessions(_BaseSpannerRestTransport._BaseBatchCreateSessions, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.BatchCreateSessions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.BatchCreateSessionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.BatchCreateSessionsResponse: - r"""Call the batch create sessions method over HTTP. - - Args: - request (~.spanner.BatchCreateSessionsRequest): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. 
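The ``host``/``url_scheme`` parameters documented above make it possible to aim the REST transport at something other than ``spanner.googleapis.com``; a sketch pointing it at a local endpoint such as an emulator (the port and database path are assumptions):

.. code-block:: python

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.spanner_v1.services.spanner import SpannerClient
    from google.cloud.spanner_v1.services.spanner.transports.rest import SpannerRestTransport

    transport = SpannerRestTransport(
        host="localhost:9020",               # assumed local REST endpoint
        credentials=AnonymousCredentials(),  # no real auth against a local server
        url_scheme="http",
    )
    client = SpannerClient(transport=transport)
    response = client.batch_create_sessions(
        database="projects/p/instances/i/databases/d",  # placeholder
        session_count=10,
    )
    print(len(response.session))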
- - """ - - http_options = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_http_options() - - request, metadata = self._interceptor.pre_batch_create_sessions(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.BatchCreateSessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchCreateSessions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._BatchCreateSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.BatchCreateSessionsResponse() - pb_resp = spanner.BatchCreateSessionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_batch_create_sessions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_create_sessions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.BatchCreateSessionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.batch_create_sessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchCreateSessions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BatchWrite(_BaseSpannerRestTransport._BaseBatchWrite, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.BatchWrite") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__(self, - request: spanner.BatchWriteRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the batch write method over HTTP. - - Args: - request (~.spanner.BatchWriteRequest): - The request object. The request for - [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.BatchWriteResponse: - The result of applying a batch of - mutations. - - """ - - http_options = _BaseSpannerRestTransport._BaseBatchWrite._get_http_options() - - request, metadata = self._interceptor.pre_batch_write(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseBatchWrite._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseBatchWrite._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseBatchWrite._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.BatchWrite", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchWrite", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._BatchWrite._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, spanner.BatchWriteResponse) - - resp = self._interceptor.post_batch_write(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_write_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.batch_write", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchWrite", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BeginTransaction(_BaseSpannerRestTransport._BaseBeginTransaction, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.BeginTransaction") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.BeginTransactionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> transaction.Transaction: - r"""Call the begin transaction method over HTTP. - - Args: - request (~.spanner.BeginTransactionRequest): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.transaction.Transaction: - A transaction. 
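Because ``_BatchWrite`` wraps the HTTP response in a ``rest_streaming.ResponseIterator``, callers see a stream of ``BatchWriteResponse`` messages, one per applied group. A consumption sketch with placeholder names:

.. code-block:: python

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()
    groups = [
        spanner_v1.BatchWriteRequest.MutationGroup(
            mutations=[
                spanner_v1.Mutation(
                    insert_or_update=spanner_v1.Mutation.Write(
                        table="Singers",  # placeholder
                        columns=["SingerId", "FirstName"],
                        values=[["1", "Marc"]],
                    )
                )
            ]
        )
    ]
    for response in client.batch_write(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
        mutation_groups=groups,
    ):
        # `indexes` identifies which request groups this status covers.
        print(list(response.indexes), response.status.code)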
- """ - - http_options = _BaseSpannerRestTransport._BaseBeginTransaction._get_http_options() - - request, metadata = self._interceptor.pre_begin_transaction(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseBeginTransaction._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseBeginTransaction._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseBeginTransaction._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.BeginTransaction", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BeginTransaction", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._BeginTransaction._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = transaction.Transaction() - pb_resp = transaction.Transaction.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_begin_transaction(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_begin_transaction_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = transaction.Transaction.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.begin_transaction", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BeginTransaction", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Commit(_BaseSpannerRestTransport._BaseCommit, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.Commit") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.CommitRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> commit_response.CommitResponse: - r"""Call the commit method over HTTP. 
- - Args: - request (~.spanner.CommitRequest): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.commit_response.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - - http_options = _BaseSpannerRestTransport._BaseCommit._get_http_options() - - request, metadata = self._interceptor.pre_commit(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseCommit._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseCommit._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseCommit._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.Commit", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Commit", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._Commit._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = commit_response.CommitResponse() - pb_resp = commit_response.CommitResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_commit(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_commit_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = commit_response.CommitResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.commit", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Commit", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateSession(_BaseSpannerRestTransport._BaseCreateSession, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.CreateSession") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.CreateSessionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.Session: - r"""Call the create session method over HTTP. - - Args: - request (~.spanner.CreateSessionRequest): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.Session: - A session in the Cloud Spanner API. 
- """ - - http_options = _BaseSpannerRestTransport._BaseCreateSession._get_http_options() - - request, metadata = self._interceptor.pre_create_session(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseCreateSession._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseCreateSession._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseCreateSession._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.CreateSession", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "CreateSession", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._CreateSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.Session() - pb_resp = spanner.Session.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_session(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_session_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.Session.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.create_session", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "CreateSession", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteSession(_BaseSpannerRestTransport._BaseDeleteSession, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.DeleteSession") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner.DeleteSessionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete session method over HTTP. - - Args: - request (~.spanner.DeleteSessionRequest): - The request object. 
The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseSpannerRestTransport._BaseDeleteSession._get_http_options() - - request, metadata = self._interceptor.pre_delete_session(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseDeleteSession._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseDeleteSession._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.DeleteSession", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "DeleteSession", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._DeleteSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _ExecuteBatchDml(_BaseSpannerRestTransport._BaseExecuteBatchDml, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ExecuteBatchDml") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.ExecuteBatchDmlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Call the execute batch dml method over HTTP. - - Args: - request (~.spanner.ExecuteBatchDmlRequest): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.spanner.ExecuteBatchDmlResponse: - The response for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - Contains a list of - [ResultSet][google.spanner.v1.ResultSet] messages, one - for each DML statement that has successfully executed, - in the same order as the statements in the request. If a - statement fails, the status in the response body - identifies the cause of the failure. - - To check for DML statements that failed, use the - following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value ``OK`` - indicates that all statements were executed - successfully. - 2. If the status was not ``OK``, check the number of - result sets in the response. If the response contains - ``N`` [ResultSet][google.spanner.v1.ResultSet] - messages, then statement ``N+1`` in the request - failed. - - Example 1: - - - Request: 5 DML statements, all executed successfully. - - Response: 5 [ResultSet][google.spanner.v1.ResultSet] - messages, with the status ``OK``. - - Example 2: - - - Request: 5 DML statements. The third statement has a - syntax error. - - Response: 2 [ResultSet][google.spanner.v1.ResultSet] - messages, and a syntax error (``INVALID_ARGUMENT``) - status. The number of - [ResultSet][google.spanner.v1.ResultSet] messages - indicates that the third statement failed, and the - fourth and fifth statements were not executed. - - """ - - http_options = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_http_options() - - request, metadata = self._interceptor.pre_execute_batch_dml(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ExecuteBatchDml", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteBatchDml", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ExecuteBatchDml._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
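The counting rule in the ExecuteBatchDml docstring above can be applied mechanically once the response has been decoded. A minimal sketch, assuming only a decoded ExecuteBatchDmlResponse in hand:

    from google.rpc import code_pb2

    def first_failed_statement(response):
        """Zero-based index of the first failed DML statement, or None if all ran."""
        if response.status.code == code_pb2.OK:
            return None
        # Per the rule above: N ResultSet messages means statement N+1
        # (one-based) failed, i.e. index N when counting from zero, and
        # later statements were not executed.
        return len(response.result_sets)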
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.ExecuteBatchDmlResponse() - pb_resp = spanner.ExecuteBatchDmlResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_execute_batch_dml(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_execute_batch_dml_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.ExecuteBatchDmlResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.execute_batch_dml", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteBatchDml", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExecuteSql(_BaseSpannerRestTransport._BaseExecuteSql, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ExecuteSql") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.ExecuteSqlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> result_set.ResultSet: - r"""Call the execute sql method over HTTP. - - Args: - request (~.spanner.ExecuteSqlRequest): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
- - """ - - http_options = _BaseSpannerRestTransport._BaseExecuteSql._get_http_options() - - request, metadata = self._interceptor.pre_execute_sql(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseExecuteSql._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseExecuteSql._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseExecuteSql._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ExecuteSql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteSql", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ExecuteSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = result_set.ResultSet() - pb_resp = result_set.ResultSet.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_execute_sql(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_execute_sql_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = result_set.ResultSet.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.execute_sql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteSql", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExecuteStreamingSql(_BaseSpannerRestTransport._BaseExecuteStreamingSql, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ExecuteStreamingSql") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__(self, - request: spanner.ExecuteSqlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the execute streaming sql method over HTTP. 
- - Args: - request (~.spanner.ExecuteSqlRequest): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.PartialResultSet: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - - http_options = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_http_options() - - request, metadata = self._interceptor.pre_execute_streaming_sql(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ExecuteStreamingSql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteStreamingSql", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ExecuteStreamingSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
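For the streaming RPCs the handler passes stream=True to the HTTP session and wraps the raw response in rest_streaming.ResponseIterator, which decodes each chunk of the stream into a result_set.PartialResultSet. A usage sketch, with the session name and SQL as hypothetical placeholders and transport an already-constructed SpannerRestTransport:

    from google.cloud.spanner_v1.types import spanner

    request = spanner.ExecuteSqlRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # hypothetical
        sql="SELECT SingerId, AlbumId FROM Albums",               # hypothetical
    )
    for partial in transport.execute_streaming_sql(request):
        # Each item is a result_set.PartialResultSet. A single value may
        # be split across chunks (chunked_value=True), so a complete
        # consumer merges the last value of one chunk with the first
        # value of the next before using it.
        print(partial.values)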
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) - - resp = self._interceptor.post_execute_streaming_sql(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_execute_streaming_sql_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.execute_streaming_sql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteStreamingSql", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetSession(_BaseSpannerRestTransport._BaseGetSession, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.GetSession") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner.GetSessionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.Session: - r"""Call the get session method over HTTP. - - Args: - request (~.spanner.GetSessionRequest): - The request object. The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.Session: - A session in the Cloud Spanner API. 
- """ - - http_options = _BaseSpannerRestTransport._BaseGetSession._get_http_options() - - request, metadata = self._interceptor.pre_get_session(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseGetSession._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseGetSession._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.GetSession", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "GetSession", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._GetSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.Session() - pb_resp = spanner.Session.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_session(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_session_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.Session.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.get_session", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "GetSession", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListSessions(_BaseSpannerRestTransport._BaseListSessions, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ListSessions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner.ListSessionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.ListSessionsResponse: - r"""Call the list sessions method over HTTP. - - Args: - request (~.spanner.ListSessionsRequest): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.ListSessionsResponse: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - """ - - http_options = _BaseSpannerRestTransport._BaseListSessions._get_http_options() - - request, metadata = self._interceptor.pre_list_sessions(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseListSessions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseListSessions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ListSessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ListSessions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ListSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
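ListSessions is the transport's one paginated RPC; each call returns a single page of sessions plus a next_page_token (the generated client wraps this in a pager, but the stub itself does not). A hand-rolled pagination sketch, assuming a constructed transport and a database resource name:

    from google.cloud.spanner_v1.types import spanner

    def iter_sessions(transport, database):
        """Yield every Session by walking next_page_token by hand."""
        page_token = ""
        while True:
            response = transport.list_sessions(spanner.ListSessionsRequest(
                database=database,
                page_token=page_token,
            ))
            yield from response.sessions
            page_token = response.next_page_token
            if not page_token:
                break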
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.ListSessionsResponse() - pb_resp = spanner.ListSessionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_sessions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_sessions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.ListSessionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.list_sessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ListSessions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _PartitionQuery(_BaseSpannerRestTransport._BasePartitionQuery, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.PartitionQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.PartitionQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.PartitionResponse: - r"""Call the partition query method over HTTP. - - Args: - request (~.spanner.PartitionQueryRequest): - The request object. The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.spanner.PartitionResponse: - The response for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - - http_options = _BaseSpannerRestTransport._BasePartitionQuery._get_http_options() - - request, metadata = self._interceptor.pre_partition_query(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BasePartitionQuery._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BasePartitionQuery._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BasePartitionQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.PartitionQuery", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._PartitionQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.PartitionResponse() - pb_resp = spanner.PartitionResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_partition_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_partition_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.PartitionResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.partition_query", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _PartitionRead(_BaseSpannerRestTransport._BasePartitionRead, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.PartitionRead") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.PartitionReadRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.PartitionResponse: - r"""Call the partition read method over HTTP. - - Args: - request (~.spanner.PartitionReadRequest): - The request object. The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.PartitionResponse: - The response for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - - http_options = _BaseSpannerRestTransport._BasePartitionRead._get_http_options() - - request, metadata = self._interceptor.pre_partition_read(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BasePartitionRead._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BasePartitionRead._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BasePartitionRead._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.PartitionRead", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionRead", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._PartitionRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
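PartitionQuery and PartitionRead return opaque partition tokens that are replayed against the streaming RPCs, one stream per token. A sketch of the fan-out for the query side, assuming transaction_id came from BeginTransaction with a read-only transaction (partition tokens are only valid for such transactions) and that each yielded stream could be handed to a separate worker:

    from google.cloud.spanner_v1.types import spanner, transaction

    def partitioned_query_streams(transport, session, transaction_id, sql):
        """Yield one PartialResultSet stream per partition token."""
        partition_response = transport.partition_query(spanner.PartitionQueryRequest(
            session=session,
            transaction=transaction.TransactionSelector(id=transaction_id),
            sql=sql,
        ))
        for partition in partition_response.partitions:
            # Replay the same SQL and transaction with each token.
            yield transport.execute_streaming_sql(spanner.ExecuteSqlRequest(
                session=session,
                transaction=transaction.TransactionSelector(id=transaction_id),
                sql=sql,
                partition_token=partition.partition_token,
            ))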
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.PartitionResponse() - pb_resp = spanner.PartitionResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_partition_read(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_partition_read_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.PartitionResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.partition_read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionRead", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Read(_BaseSpannerRestTransport._BaseRead, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.Read") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.ReadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> result_set.ResultSet: - r"""Call the read method over HTTP. - - Args: - request (~.spanner.ReadRequest): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
- - """ - - http_options = _BaseSpannerRestTransport._BaseRead._get_http_options() - - request, metadata = self._interceptor.pre_read(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseRead._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseRead._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseRead._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.Read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Read", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._Read._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = result_set.ResultSet() - pb_resp = result_set.ResultSet.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_read(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_read_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = result_set.ResultSet.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Read", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Rollback(_BaseSpannerRestTransport._BaseRollback, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.Rollback") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.RollbackRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the rollback method over HTTP. - - Args: - request (~.spanner.RollbackRequest): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseSpannerRestTransport._BaseRollback._get_http_options() - - request, metadata = self._interceptor.pre_rollback(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseRollback._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseRollback._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseRollback._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.Rollback", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Rollback", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._Rollback._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _StreamingRead(_BaseSpannerRestTransport._BaseStreamingRead, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.StreamingRead") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__(self, - request: spanner.ReadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the streaming read method over HTTP. - - Args: - request (~.spanner.ReadRequest): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.PartialResultSet: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - - http_options = _BaseSpannerRestTransport._BaseStreamingRead._get_http_options() - - request, metadata = self._interceptor.pre_streaming_read(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseStreamingRead._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseStreamingRead._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseStreamingRead._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.StreamingRead", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "StreamingRead", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._StreamingRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) - - resp = self._interceptor.post_streaming_read(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_streaming_read_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.streaming_read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "StreamingRead", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - spanner.BatchCreateSessionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchCreateSessions(self._session, self._host, self._interceptor) # type: ignore - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - spanner.BatchWriteResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - transaction.Transaction]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - commit_response.CommitResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Commit(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - spanner.Session]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateSession(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteSession(self._session, self._host, self._interceptor) # type: ignore - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - spanner.ExecuteBatchDmlResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExecuteBatchDml(self._session, self._host, self._interceptor) # type: ignore - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.ResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExecuteSql(self._session, self._host, self._interceptor) # type: ignore - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.PartialResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExecuteStreamingSql(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - spanner.Session]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetSession(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - spanner.ListSessionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSessions(self._session, self._host, self._interceptor) # type: ignore - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - spanner.PartitionResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._PartitionQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - spanner.PartitionResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._PartitionRead(self._session, self._host, self._interceptor) # type: ignore - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - result_set.ResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Read(self._session, self._host, self._interceptor) # type: ignore - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Rollback(self._session, self._host, self._interceptor) # type: ignore - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - result_set.PartialResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._StreamingRead(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'SpannerRestTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py deleted file mode 100644 index 79eb05d0cf..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py +++ /dev/null @@ -1,817 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import SpannerTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseSpannerRestTransport(SpannerTransport): - """Base REST backend transport for Spanner. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseBatchCreateSessions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.BatchCreateSessionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseBatchCreateSessions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBatchWrite: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.BatchWriteRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseBatchWrite._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBeginTransaction: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.BeginTransactionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseBeginTransaction._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCommit: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.CommitRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - 
use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseCommit._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateSession: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.CreateSessionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseCreateSession._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteSession: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.DeleteSessionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseDeleteSession._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExecuteBatchDml: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeBatchDml', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ExecuteBatchDmlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseExecuteBatchDml._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExecuteSql: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeSql', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ExecuteSqlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseExecuteSql._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExecuteStreamingSql: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeStreamingSql', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ExecuteSqlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseExecuteStreamingSql._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetSession: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.GetSessionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseGetSession._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListSessions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ListSessionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseListSessions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BasePartitionQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionQuery', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.PartitionQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = 
json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BasePartitionQuery._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BasePartitionRead: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionRead', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.PartitionReadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BasePartitionRead._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRead: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:read', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ReadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseRead._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRollback: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - 
return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.RollbackRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseRollback._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseStreamingRead: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:streamingRead', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ReadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseStreamingRead._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseSpannerRestTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py deleted file mode 100644 index d4c6938ee1..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py +++ /dev/null @@ -1,122 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
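[Editor's note] Every per-RPC helper class in the rest_base.py module above follows the same three-step request-shaping pipeline: transcode the request proto against its HTTP rule, JSON-encode the transcoded body, then build query parameters with enums encoded as integers. Below is a minimal sketch of that pipeline run by hand, assuming google-cloud-spanner and google-api-core are installed; the database path is a hypothetical placeholder, not part of the patch.

import json

from google.api_core import path_template
from google.protobuf import json_format

from google.cloud.spanner_v1.types import spanner

# Mirrors _BaseCreateSession._get_http_options() from the transport above.
http_options = [{
    'method': 'post',
    'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions',
    'body': '*',
}]

request = spanner.CreateSessionRequest(
    database="projects/my-project/instances/my-instance/databases/my-db",  # hypothetical
)

# transcode() matches the message against the URI template and splits it into
# 'method', 'uri', 'body', and 'query_params' (what _get_transcoded_request does).
transcoded = path_template.transcode(http_options, spanner.CreateSessionRequest.pb(request))

# _get_request_body_json: serialize the body with integer enum encoding.
body = json_format.MessageToJson(transcoded['body'], use_integers_for_enums=True)

# _get_query_params_json: fields not bound to the path or body become query params.
query_params = json.loads(json_format.MessageToJson(
    transcoded['query_params'], use_integers_for_enums=True))
query_params["$alt"] = "json;enum-encoding=int"

print(transcoded['method'], transcoded['uri'])
# post /v1/projects/my-project/instances/my-instance/databases/my-db/sessions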
-# -from .change_stream import ( - ChangeStreamRecord, -) -from .commit_response import ( - CommitResponse, -) -from .keys import ( - KeyRange, - KeySet, -) -from .mutation import ( - Mutation, -) -from .query_plan import ( - PlanNode, - QueryPlan, -) -from .result_set import ( - PartialResultSet, - ResultSet, - ResultSetMetadata, - ResultSetStats, -) -from .spanner import ( - BatchCreateSessionsRequest, - BatchCreateSessionsResponse, - BatchWriteRequest, - BatchWriteResponse, - BeginTransactionRequest, - CommitRequest, - CreateSessionRequest, - DeleteSessionRequest, - DirectedReadOptions, - ExecuteBatchDmlRequest, - ExecuteBatchDmlResponse, - ExecuteSqlRequest, - GetSessionRequest, - ListSessionsRequest, - ListSessionsResponse, - Partition, - PartitionOptions, - PartitionQueryRequest, - PartitionReadRequest, - PartitionResponse, - ReadRequest, - RequestOptions, - RollbackRequest, - Session, -) -from .transaction import ( - MultiplexedSessionPrecommitToken, - Transaction, - TransactionOptions, - TransactionSelector, -) -from .type import ( - StructType, - Type, - TypeAnnotationCode, - TypeCode, -) - -__all__ = ( - 'ChangeStreamRecord', - 'CommitResponse', - 'KeyRange', - 'KeySet', - 'Mutation', - 'PlanNode', - 'QueryPlan', - 'PartialResultSet', - 'ResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 'BatchWriteRequest', - 'BatchWriteResponse', - 'BeginTransactionRequest', - 'CommitRequest', - 'CreateSessionRequest', - 'DeleteSessionRequest', - 'DirectedReadOptions', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'ExecuteSqlRequest', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'Partition', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'PartitionResponse', - 'ReadRequest', - 'RequestOptions', - 'RollbackRequest', - 'Session', - 'MultiplexedSessionPrecommitToken', - 'Transaction', - 'TransactionOptions', - 'TransactionSelector', - 'StructType', - 'Type', - 'TypeAnnotationCode', - 'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py deleted file mode 100644 index 8db7f85bb5..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py +++ /dev/null @@ -1,683 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
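[Editor's note] The ``__all__`` tuple in the types/__init__.py module above is what makes ``from google.cloud.spanner_v1.types import X`` work for every generated message, and the same names are re-exported from the versioned package root. A short sketch, assuming the generated package is installed, showing both import paths resolving to the same proto-plus class:

from google.cloud import spanner_v1
from google.cloud.spanner_v1.types import KeySet

# Both paths expose the same class object; types/__init__.py is the source.
assert spanner_v1.KeySet is KeySet

# Proto-plus messages are constructed with keyword arguments; the trailing
# underscore in ``all_`` avoids shadowing Python's built-in ``all``.
keyset = KeySet(all_=True)
print(KeySet.pb(keyset))  # underlying protobuf message: all: true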
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'ChangeStreamRecord', - }, -) - - -class ChangeStreamRecord(proto.Message): - r"""Spanner Change Streams enable customers to capture and stream out - changes to their Spanner databases in real time. A change stream can - be created with option partition_mode='IMMUTABLE_KEY_RANGE' or - partition_mode='MUTABLE_KEY_RANGE'. - - This message is only used in Change Streams created with the option - partition_mode='MUTABLE_KEY_RANGE'. Spanner automatically creates a - special Table-Valued Function (TVF) along with each Change Stream. - The function provides access to the change stream's records. The - function is named READ\_<change_stream_name> (where - <change_stream_name> is the name of the change stream), and it - returns a table with only one column called ChangeRecord. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - data_change_record (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord): - Data change record describing a data change - for a change stream partition. - - This field is a member of `oneof`_ ``record``. - heartbeat_record (google.cloud.spanner_v1.types.ChangeStreamRecord.HeartbeatRecord): - Heartbeat record describing a heartbeat for a - change stream partition. - - This field is a member of `oneof`_ ``record``. - partition_start_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionStartRecord): - Partition start record describing a new - change stream partition. - - This field is a member of `oneof`_ ``record``. - partition_end_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEndRecord): - Partition end record describing a terminated - change stream partition. - - This field is a member of `oneof`_ ``record``. - partition_event_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord): - Partition event record describing key range - changes for a change stream partition. - - This field is a member of `oneof`_ ``record``. - """ - - class DataChangeRecord(proto.Message): - r"""A data change record contains a set of changes to a table - with the same modification type (insert, update, or delete) - committed at the same commit timestamp in one change stream - partition for the same transaction. Multiple data change records - can be returned for the same transaction across multiple change - stream partitions. - - Attributes: - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Indicates the timestamp at which the change was committed. - DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. - record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition.
To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - - The record sequence number ordering across partitions is - only meaningful in the context of a specific transaction. - Record sequence numbers are unique across partitions for a - specific transaction. Sort the DataChangeRecords for the - same - [server_transaction_id][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.server_transaction_id] - by - [record_sequence][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.record_sequence] - to reconstruct the ordering of the changes within the - transaction. - server_transaction_id (str): - Provides a globally unique string that represents the - transaction in which the change was committed. Multiple - transactions can have the same commit timestamp, but each - transaction has a unique server_transaction_id. - is_last_record_in_transaction_in_partition (bool): - Indicates whether this is the last record for - a transaction in the current partition. Clients - can use this field to determine when all - records for a transaction in the current - partition have been received. - table (str): - Name of the table affected by the change. - column_metadata (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ColumnMetadata]): - Provides metadata describing the columns associated with the - [mods][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.mods] - listed below. - mods (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.Mod]): - Describes the changes that were made. - mod_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModType): - Describes the type of change. - value_capture_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ValueCaptureType): - Describes the value capture type that was - specified in the change stream configuration - when this change was captured. - number_of_records_in_transaction (int): - Indicates the number of data change records - that are part of this transaction across all - change stream partitions. This value can be used - to assemble all the records associated with a - particular transaction. - number_of_partitions_in_transaction (int): - Indicates the number of partitions that - return data change records for this transaction. - This value can be helpful in assembling all - records associated with a particular - transaction. - transaction_tag (str): - Indicates the transaction tag associated with - this transaction. - is_system_transaction (bool): - Indicates whether the transaction is a system - transaction. System transactions include those - issued by time-to-live (TTL), column backfill, - etc. - """ - class ModType(proto.Enum): - r"""Mod type describes the type of change Spanner applied to the data. - For example, if the client submits an INSERT_OR_UPDATE request, - Spanner will perform an insert if there is no existing row and - return ModType INSERT. Alternatively, if there is an existing row, - Spanner will perform an update and return ModType UPDATE. - - Values: - MOD_TYPE_UNSPECIFIED (0): - Not specified. - INSERT (10): - Indicates data was inserted. - UPDATE (20): - Indicates existing data was updated. - DELETE (30): - Indicates existing data was deleted. 
- """ - MOD_TYPE_UNSPECIFIED = 0 - INSERT = 10 - UPDATE = 20 - DELETE = 30 - - class ValueCaptureType(proto.Enum): - r"""Value capture type describes which values are recorded in the - data change record. - - Values: - VALUE_CAPTURE_TYPE_UNSPECIFIED (0): - Not specified. - OLD_AND_NEW_VALUES (10): - Records both old and new values of the - modified watched columns. - NEW_VALUES (20): - Records only new values of the modified - watched columns. - NEW_ROW (30): - Records new values of all watched columns, - including modified and unmodified columns. - NEW_ROW_AND_OLD_VALUES (40): - Records the new values of all watched - columns, including modified and unmodified - columns. Also records the old values of the - modified columns. - """ - VALUE_CAPTURE_TYPE_UNSPECIFIED = 0 - OLD_AND_NEW_VALUES = 10 - NEW_VALUES = 20 - NEW_ROW = 30 - NEW_ROW_AND_OLD_VALUES = 40 - - class ColumnMetadata(proto.Message): - r"""Metadata for a column. - - Attributes: - name (str): - Name of the column. - type_ (google.cloud.spanner_v1.types.Type): - Type of the column. - is_primary_key (bool): - Indicates whether the column is a primary key - column. - ordinal_position (int): - Ordinal position of the column based on the - original table definition in the schema starting - with a value of 1. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: gs_type.Type = proto.Field( - proto.MESSAGE, - number=2, - message=gs_type.Type, - ) - is_primary_key: bool = proto.Field( - proto.BOOL, - number=3, - ) - ordinal_position: int = proto.Field( - proto.INT64, - number=4, - ) - - class ModValue(proto.Message): - r"""Returns the value and associated metadata for a particular field of - the - [Mod][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod]. - - Attributes: - column_metadata_index (int): - Index within the repeated - [column_metadata][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.column_metadata] - field, to obtain the column metadata for the column that was - modified. - value (google.protobuf.struct_pb2.Value): - The value of the column. - """ - - column_metadata_index: int = proto.Field( - proto.INT32, - number=1, - ) - value: struct_pb2.Value = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Value, - ) - - class Mod(proto.Message): - r"""A mod describes all data changes in a watched table row. - - Attributes: - keys (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): - Returns the value of the primary key of the - modified row. - old_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): - Returns the old values before the change for the modified - columns. Always empty for - [INSERT][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.INSERT], - or if old values are not being captured specified by - [value_capture_type][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType]. - new_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): - Returns the new values after the change for the modified - columns. Always empty for - [DELETE][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.DELETE]. 
- """ - - keys: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ChangeStreamRecord.DataChangeRecord.ModValue', - ) - old_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ChangeStreamRecord.DataChangeRecord.ModValue', - ) - new_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ChangeStreamRecord.DataChangeRecord.ModValue', - ) - - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - server_transaction_id: str = proto.Field( - proto.STRING, - number=3, - ) - is_last_record_in_transaction_in_partition: bool = proto.Field( - proto.BOOL, - number=4, - ) - table: str = proto.Field( - proto.STRING, - number=5, - ) - column_metadata: MutableSequence['ChangeStreamRecord.DataChangeRecord.ColumnMetadata'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='ChangeStreamRecord.DataChangeRecord.ColumnMetadata', - ) - mods: MutableSequence['ChangeStreamRecord.DataChangeRecord.Mod'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ChangeStreamRecord.DataChangeRecord.Mod', - ) - mod_type: 'ChangeStreamRecord.DataChangeRecord.ModType' = proto.Field( - proto.ENUM, - number=8, - enum='ChangeStreamRecord.DataChangeRecord.ModType', - ) - value_capture_type: 'ChangeStreamRecord.DataChangeRecord.ValueCaptureType' = proto.Field( - proto.ENUM, - number=9, - enum='ChangeStreamRecord.DataChangeRecord.ValueCaptureType', - ) - number_of_records_in_transaction: int = proto.Field( - proto.INT32, - number=10, - ) - number_of_partitions_in_transaction: int = proto.Field( - proto.INT32, - number=11, - ) - transaction_tag: str = proto.Field( - proto.STRING, - number=12, - ) - is_system_transaction: bool = proto.Field( - proto.BOOL, - number=13, - ) - - class HeartbeatRecord(proto.Message): - r"""A heartbeat record is returned as a progress indicator, when - there are no data changes or any other partition record types in - the change stream partition. - - Attributes: - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Indicates the timestamp at which the query - has returned all the records in the change - stream partition with timestamp <= heartbeat - timestamp. The heartbeat timestamp will not be - the same as the timestamps of other record types - in the same partition. - """ - - timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - - class PartitionStartRecord(proto.Message): - r"""A partition start record serves as a notification that the - client should schedule the partitions to be queried. - PartitionStartRecord returns information about one or more - partitions. - - Attributes: - start_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Start timestamp at which the partitions should be queried to - return change stream records with timestamps >= - start_timestamp. DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. 
- record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition. To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - partition_tokens (MutableSequence[str]): - Unique partition identifiers to be used in - queries. - """ - - start_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - partition_tokens: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - class PartitionEndRecord(proto.Message): - r"""A partition end record serves as a notification that the - client should stop reading the partition. No further records are - expected to be retrieved on it. - - Attributes: - end_timestamp (google.protobuf.timestamp_pb2.Timestamp): - End timestamp at which the change stream partition is - terminated. All changes generated by this partition will - have timestamps <= end_timestamp. - DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. PartitionEndRecord is the last record - returned for a partition. - record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition. To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - partition_token (str): - Unique partition identifier describing the terminated change - stream partition. - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.partition_token] - is equal to the partition token of the change stream - partition currently queried to return this - PartitionEndRecord. - """ - - end_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - partition_token: str = proto.Field( - proto.STRING, - number=3, - ) - - class PartitionEventRecord(proto.Message): - r"""A partition event record describes key range changes for a change - stream partition. The changes to a row defined by its primary key - can be captured in one change stream partition for a specific time - range, and then be captured in a different change stream partition - for a different time range. This movement of key ranges across - change stream partitions is a reflection of activities, such as - Spanner's dynamic splitting and load balancing, etc. Processing this - event is needed if users want to guarantee processing of the changes - for any key in timestamp order. If time ordered processing of - changes for a primary key is not needed, this event can be ignored. - To guarantee time ordered processing for each primary key, if the - event describes move-ins, the reader of this partition needs to wait - until the readers of the source partitions have processed all - records with timestamps <= this - PartitionEventRecord.commit_timestamp, before advancing beyond this - PartitionEventRecord. 
If the event describes move-outs, the reader - can notify the readers of the destination partitions that they can - continue processing. - - Attributes: - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Indicates the commit timestamp at which the key range change - occurred. DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. - record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition. To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - partition_token (str): - Unique partition identifier describing the partition this - event occurred on. - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] - is equal to the partition token of the change stream - partition currently queried to return this - PartitionEventRecord. - move_in_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveInEvent]): - Set when one or more key ranges are moved into the change - stream partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - Example: Two key ranges are moved into partition (P1) from - partition (P2) and partition (P3) in a single transaction at - timestamp T. - - The PartitionEventRecord returned in P1 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P1" move_in_events { source_partition_token: "P2" } - move_in_events { source_partition_token: "P3" } } - - The PartitionEventRecord returned in P2 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P2" move_out_events { destination_partition_token: "P1" } } - - The PartitionEventRecord returned in P3 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P3" move_out_events { destination_partition_token: "P1" } } - move_out_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent]): - Set when one or more key ranges are moved out of the change - stream partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - Example: Two key ranges are moved out of partition (P1) to - partition (P2) and partition (P3) in a single transaction at - timestamp T. 
- - The PartitionEventRecord returned in P1 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P1" move_out_events { destination_partition_token: "P2" } - move_out_events { destination_partition_token: "P3" } } - - The PartitionEventRecord returned in P2 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P2" move_in_events { source_partition_token: "P1" } } - - The PartitionEventRecord returned in P3 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P3" move_in_events { source_partition_token: "P1" } } - """ - - class MoveInEvent(proto.Message): - r"""Describes move-in of the key ranges into the change stream partition - identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - To maintain processing the changes for a particular key in timestamp - order, the query processing the change stream partition identified - by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] - should not advance beyond the partition event record commit - timestamp until the queries processing the source change stream - partitions have processed all change stream records with timestamps - <= the partition event record commit timestamp. - - Attributes: - source_partition_token (str): - A unique partition identifier describing the - source change stream partition that recorded - changes for the key range that is moving into - this partition. - """ - - source_partition_token: str = proto.Field( - proto.STRING, - number=1, - ) - - class MoveOutEvent(proto.Message): - r"""Describes move-out of the key ranges out of the change stream - partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - To maintain processing the changes for a particular key in timestamp - order, the query processing the - [MoveOutEvent][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent] - in the partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] - should inform the queries processing the destination partitions that - they can unblock and proceed processing records past the - [commit_timestamp][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.commit_timestamp]. - - Attributes: - destination_partition_token (str): - A unique partition identifier describing the - destination change stream partition that will - record changes for the key range that is moving - out of this partition.
- """ - - destination_partition_token: str = proto.Field( - proto.STRING, - number=1, - ) - - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - partition_token: str = proto.Field( - proto.STRING, - number=3, - ) - move_in_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveInEvent'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ChangeStreamRecord.PartitionEventRecord.MoveInEvent', - ) - move_out_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveOutEvent'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='ChangeStreamRecord.PartitionEventRecord.MoveOutEvent', - ) - - data_change_record: DataChangeRecord = proto.Field( - proto.MESSAGE, - number=1, - oneof='record', - message=DataChangeRecord, - ) - heartbeat_record: HeartbeatRecord = proto.Field( - proto.MESSAGE, - number=2, - oneof='record', - message=HeartbeatRecord, - ) - partition_start_record: PartitionStartRecord = proto.Field( - proto.MESSAGE, - number=3, - oneof='record', - message=PartitionStartRecord, - ) - partition_end_record: PartitionEndRecord = proto.Field( - proto.MESSAGE, - number=4, - oneof='record', - message=PartitionEndRecord, - ) - partition_event_record: PartitionEventRecord = proto.Field( - proto.MESSAGE, - number=5, - oneof='record', - message=PartitionEventRecord, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py deleted file mode 100644 index 3771ef1f71..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import transaction -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'CommitResponse', - }, -) - - -class CommitResponse(proto.Message): - r"""The response for [Commit][google.spanner.v1.Spanner.Commit]. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - The Cloud Spanner timestamp at which the - transaction committed. - commit_stats (google.cloud.spanner_v1.types.CommitResponse.CommitStats): - The statistics about this ``Commit``. Not returned by - default. For more information, see - [CommitRequest.return_commit_stats][google.spanner.v1.CommitRequest.return_commit_stats]. 
- precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - If specified, transaction has not committed - yet. You must retry the commit with the new - precommit token. - - This field is a member of `oneof`_ ``MultiplexedSessionRetry``. - snapshot_timestamp (google.protobuf.timestamp_pb2.Timestamp): - If ``TransactionOptions.isolation_level`` is set to - ``IsolationLevel.REPEATABLE_READ``, then the snapshot - timestamp is the timestamp at which all reads in the - transaction ran. This timestamp is never returned. - """ - - class CommitStats(proto.Message): - r"""Additional statistics about a commit. - - Attributes: - mutation_count (int): - The total number of mutations for the transaction. Knowing - the ``mutation_count`` value can help you maximize the - number of mutations in a transaction and minimize the number - of API round trips. You can also monitor this value to - prevent transactions from exceeding the system - `limit `__. - If the number of mutations exceeds the limit, the server - returns - `INVALID_ARGUMENT `__. - """ - - mutation_count: int = proto.Field( - proto.INT64, - number=1, - ) - - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - commit_stats: CommitStats = proto.Field( - proto.MESSAGE, - number=2, - message=CommitStats, - ) - precommit_token: transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=4, - oneof='MultiplexedSessionRetry', - message=transaction.MultiplexedSessionPrecommitToken, - ) - snapshot_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py deleted file mode 100644 index 54f745c9b7..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py +++ /dev/null @@ -1,248 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'KeyRange', - 'KeySet', - }, -) - - -class KeyRange(proto.Message): - r"""KeyRange represents a range of rows in a table or index. - - A range has a start key and an end key. These keys can be open or - closed, indicating if the range includes rows with that key. - - Keys are represented by lists, where the ith value in the list - corresponds to the ith component of the table or index primary key. - Individual values are encoded as described - [here][google.spanner.v1.TypeCode]. 
-
-    For example, consider the following table definition:
-
-    ::
-
-        CREATE TABLE UserEvents (
-          UserName STRING(MAX),
-          EventDate STRING(10)
-        ) PRIMARY KEY(UserName, EventDate);
-
-    The following keys name rows in this table:
-
-    ::
-
-        ["Bob", "2014-09-23"]
-        ["Alfred", "2015-06-12"]
-
-    Since the ``UserEvents`` table's ``PRIMARY KEY`` clause names two
-    columns, each ``UserEvents`` key has two elements; the first is the
-    ``UserName``, and the second is the ``EventDate``.
-
-    Key ranges with multiple components are interpreted
-    lexicographically by component using the table or index key's
-    declared sort order. For example, the following range returns all
-    events for user ``"Bob"`` that occurred in the year 2015:
-
-    ::
-
-        "start_closed": ["Bob", "2015-01-01"]
-        "end_closed": ["Bob", "2015-12-31"]
-
-    Start and end keys can omit trailing key components. This affects
-    the inclusion and exclusion of rows that exactly match the provided
-    key components: if the key is closed, then rows that exactly match
-    the provided components are included; if the key is open, then rows
-    that exactly match are not included.
-
-    For example, the following range includes all events for ``"Bob"``
-    that occurred during and after the year 2000:
-
-    ::
-
-        "start_closed": ["Bob", "2000-01-01"]
-        "end_closed": ["Bob"]
-
-    The next example retrieves all events for ``"Bob"``:
-
-    ::
-
-        "start_closed": ["Bob"]
-        "end_closed": ["Bob"]
-
-    To retrieve events before the year 2000:
-
-    ::
-
-        "start_closed": ["Bob"]
-        "end_open": ["Bob", "2000-01-01"]
-
-    The following range includes all rows in the table:
-
-    ::
-
-        "start_closed": []
-        "end_closed": []
-
-    This range returns all users whose ``UserName`` begins with any
-    character from A to C:
-
-    ::
-
-        "start_closed": ["A"]
-        "end_open": ["D"]
-
-    This range returns all users whose ``UserName`` begins with B:
-
-    ::
-
-        "start_closed": ["B"]
-        "end_open": ["C"]
-
-    Key ranges honor column sort order. For example, suppose a table is
-    defined as follows:
-
-    ::
-
-        CREATE TABLE DescendingSortedTable (
-          Key INT64,
-          ...
-        ) PRIMARY KEY(Key DESC);
-
-    The following range retrieves all rows with key values between 1 and
-    100 inclusive:
-
-    ::
-
-        "start_closed": ["100"]
-        "end_closed": ["1"]
-
-    Note that 100 is passed as the start, and 1 is passed as the end,
-    because ``Key`` is a descending column in the schema.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        start_closed (google.protobuf.struct_pb2.ListValue):
-            If the start is closed, then the range includes all rows
-            whose first ``len(start_closed)`` key columns exactly match
-            ``start_closed``.
-
-            This field is a member of `oneof`_ ``start_key_type``.
-        start_open (google.protobuf.struct_pb2.ListValue):
-            If the start is open, then the range excludes rows whose
-            first ``len(start_open)`` key columns exactly match
-            ``start_open``.
-
-            This field is a member of `oneof`_ ``start_key_type``.
-        end_closed (google.protobuf.struct_pb2.ListValue):
-            If the end is closed, then the range includes all rows whose
-            first ``len(end_closed)`` key columns exactly match
-            ``end_closed``.
-
-            This field is a member of `oneof`_ ``end_key_type``.
- end_open (google.protobuf.struct_pb2.ListValue): - If the end is open, then the range excludes rows whose first - ``len(end_open)`` key columns exactly match ``end_open``. - - This field is a member of `oneof`_ ``end_key_type``. - """ - - start_closed: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=1, - oneof='start_key_type', - message=struct_pb2.ListValue, - ) - start_open: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=2, - oneof='start_key_type', - message=struct_pb2.ListValue, - ) - end_closed: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=3, - oneof='end_key_type', - message=struct_pb2.ListValue, - ) - end_open: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=4, - oneof='end_key_type', - message=struct_pb2.ListValue, - ) - - -class KeySet(proto.Message): - r"""``KeySet`` defines a collection of Cloud Spanner keys and/or key - ranges. All the keys are expected to be in the same table or index. - The keys need not be sorted in any particular way. - - If the same key is specified multiple times in the set (for example - if two ranges, two keys, or a key and a range overlap), Cloud - Spanner behaves as if the key were only specified once. - - Attributes: - keys (MutableSequence[google.protobuf.struct_pb2.ListValue]): - A list of specific keys. Entries in ``keys`` should have - exactly as many elements as there are columns in the primary - or index key with which this ``KeySet`` is used. Individual - key values are encoded as described - [here][google.spanner.v1.TypeCode]. - ranges (MutableSequence[google.cloud.spanner_v1.types.KeyRange]): - A list of key ranges. See - [KeyRange][google.spanner.v1.KeyRange] for more information - about key range specifications. - all_ (bool): - For convenience ``all`` can be set to ``true`` to indicate - that this ``KeySet`` matches all keys in the table or index. - Note that any keys specified in ``keys`` or ``ranges`` are - only yielded once. - """ - - keys: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct_pb2.ListValue, - ) - ranges: MutableSequence['KeyRange'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='KeyRange', - ) - all_: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py deleted file mode 100644 index 0d4f7b4466..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py +++ /dev/null @@ -1,201 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
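The ``KeySet`` message that closes keys.py composes directly with ``KeyRange``. A minimal sketch using the docstring's own example keys; it assumes proto-plus's usual coercion of plain Python lists into the ``ListValue`` fields::

    from google.cloud import spanner_v1

    # All events for "Bob" during and after 2000 (trailing key
    # components may be omitted; a closed end key includes exact matches).
    bob_range = spanner_v1.KeyRange(
        start_closed=["Bob", "2000-01-01"],
        end_closed=["Bob"],
    )

    # A KeySet can mix individual keys and ranges; overlapping keys are
    # treated as if specified once.
    key_set = spanner_v1.KeySet(
        keys=[["Alfred", "2015-06-12"]],
        ranges=[bob_range],
    )

    # Or simply match every row in the table or index:
    everything = spanner_v1.KeySet(all_=True)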
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import keys -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'Mutation', - }, -) - - -class Mutation(proto.Message): - r"""A modification to one or more Cloud Spanner rows. Mutations can be - applied to a Cloud Spanner database by sending them in a - [Commit][google.spanner.v1.Spanner.Commit] call. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - insert (google.cloud.spanner_v1.types.Mutation.Write): - Insert new rows in a table. If any of the rows already - exist, the write or transaction fails with error - ``ALREADY_EXISTS``. - - This field is a member of `oneof`_ ``operation``. - update (google.cloud.spanner_v1.types.Mutation.Write): - Update existing rows in a table. If any of the rows does not - already exist, the transaction fails with error - ``NOT_FOUND``. - - This field is a member of `oneof`_ ``operation``. - insert_or_update (google.cloud.spanner_v1.types.Mutation.Write): - Like [insert][google.spanner.v1.Mutation.insert], except - that if the row already exists, then its column values are - overwritten with the ones provided. Any column values not - explicitly written are preserved. - - When using - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], - just as when using - [insert][google.spanner.v1.Mutation.insert], all - ``NOT NULL`` columns in the table must be given a value. - This holds true even when the row already exists and will - therefore actually be updated. - - This field is a member of `oneof`_ ``operation``. - replace (google.cloud.spanner_v1.types.Mutation.Write): - Like [insert][google.spanner.v1.Mutation.insert], except - that if the row already exists, it is deleted, and the - column values provided are inserted instead. Unlike - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], - this means any values not explicitly written become - ``NULL``. - - In an interleaved table, if you create the child table with - the ``ON DELETE CASCADE`` annotation, then replacing a - parent row also deletes the child rows. Otherwise, you must - delete the child rows before you replace the parent row. - - This field is a member of `oneof`_ ``operation``. - delete (google.cloud.spanner_v1.types.Mutation.Delete): - Delete rows from a table. Succeeds whether or - not the named rows were present. - - This field is a member of `oneof`_ ``operation``. - """ - - class Write(proto.Message): - r"""Arguments to [insert][google.spanner.v1.Mutation.insert], - [update][google.spanner.v1.Mutation.update], - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], and - [replace][google.spanner.v1.Mutation.replace] operations. - - Attributes: - table (str): - Required. The table whose rows will be - written. - columns (MutableSequence[str]): - The names of the columns in - [table][google.spanner.v1.Mutation.Write.table] to be - written. - - The list of columns must contain enough columns to allow - Cloud Spanner to derive values for all primary key columns - in the row(s) to be modified. 
- values (MutableSequence[google.protobuf.struct_pb2.ListValue]): - The values to be written. ``values`` can contain more than - one list of values. If it does, then multiple rows are - written, one for each entry in ``values``. Each list in - ``values`` must have exactly as many entries as there are - entries in - [columns][google.spanner.v1.Mutation.Write.columns] above. - Sending multiple lists is equivalent to sending multiple - ``Mutation``\ s, each containing one ``values`` entry and - repeating [table][google.spanner.v1.Mutation.Write.table] - and [columns][google.spanner.v1.Mutation.Write.columns]. - Individual values in each list are encoded as described - [here][google.spanner.v1.TypeCode]. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - ) - columns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - values: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=struct_pb2.ListValue, - ) - - class Delete(proto.Message): - r"""Arguments to [delete][google.spanner.v1.Mutation.delete] operations. - - Attributes: - table (str): - Required. The table whose rows will be - deleted. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. The primary keys of the rows within - [table][google.spanner.v1.Mutation.Delete.table] to delete. - The primary keys must be specified in the order in which - they appear in the ``PRIMARY KEY()`` clause of the table's - equivalent DDL statement (the DDL statement used to create - the table). Delete is idempotent. The transaction will - succeed even if some or all rows do not exist. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - ) - key_set: keys.KeySet = proto.Field( - proto.MESSAGE, - number=2, - message=keys.KeySet, - ) - - insert: Write = proto.Field( - proto.MESSAGE, - number=1, - oneof='operation', - message=Write, - ) - update: Write = proto.Field( - proto.MESSAGE, - number=2, - oneof='operation', - message=Write, - ) - insert_or_update: Write = proto.Field( - proto.MESSAGE, - number=3, - oneof='operation', - message=Write, - ) - replace: Write = proto.Field( - proto.MESSAGE, - number=4, - oneof='operation', - message=Write, - ) - delete: Delete = proto.Field( - proto.MESSAGE, - number=5, - oneof='operation', - message=Delete, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py deleted file mode 100644 index 26e730d2d0..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py +++ /dev/null @@ -1,219 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
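To make the ``oneof`` arms above concrete, here is a hedged sketch building an ``insert_or_update`` and a ``delete`` mutation; the table and column names are invented for the example, and ``INT64`` key values are passed as decimal strings per the ``TypeCode`` encoding rules::

    from google.cloud import spanner_v1

    # insert_or_update: NOT NULL columns must always be supplied, even
    # when the row already exists and is effectively updated.
    upsert = spanner_v1.Mutation(
        insert_or_update=spanner_v1.Mutation.Write(
            table="Singers",                        # hypothetical table
            columns=["SingerId", "FirstName"],
            values=[["1", "Marc"], ["2", "Catalina"]],  # two rows, one Write
        )
    )

    # delete: idempotent; succeeds even if some keys name no row.
    delete = spanner_v1.Mutation(
        delete=spanner_v1.Mutation.Delete(
            table="Singers",
            key_set=spanner_v1.KeySet(keys=[["1"]]),
        )
    )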
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'PlanNode', - 'QueryPlan', - }, -) - - -class PlanNode(proto.Message): - r"""Node information for nodes appearing in a - [QueryPlan.plan_nodes][google.spanner.v1.QueryPlan.plan_nodes]. - - Attributes: - index (int): - The ``PlanNode``'s index in [node - list][google.spanner.v1.QueryPlan.plan_nodes]. - kind (google.cloud.spanner_v1.types.PlanNode.Kind): - Used to determine the type of node. May be needed for - visualizing different kinds of nodes differently. For - example, If the node is a - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] node, it - will have a condensed representation which can be used to - directly embed a description of the node in its parent. - display_name (str): - The display name for the node. - child_links (MutableSequence[google.cloud.spanner_v1.types.PlanNode.ChildLink]): - List of child node ``index``\ es and their relationship to - this parent. - short_representation (google.cloud.spanner_v1.types.PlanNode.ShortRepresentation): - Condensed representation for - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] nodes. - metadata (google.protobuf.struct_pb2.Struct): - Attributes relevant to the node contained in a group of - key-value pairs. For example, a Parameter Reference node - could have the following information in its metadata: - - :: - - { - "parameter_reference": "param1", - "parameter_type": "array" - } - execution_stats (google.protobuf.struct_pb2.Struct): - The execution statistics associated with the - node, contained in a group of key-value pairs. - Only present if the plan was returned as a - result of a profile query. For example, number - of executions, number of rows/time per execution - etc. - """ - class Kind(proto.Enum): - r"""The kind of [PlanNode][google.spanner.v1.PlanNode]. Distinguishes - between the two different kinds of nodes that can appear in a query - plan. - - Values: - KIND_UNSPECIFIED (0): - Not specified. - RELATIONAL (1): - Denotes a Relational operator node in the expression tree. - Relational operators represent iterative processing of rows - during query execution. For example, a ``TableScan`` - operation that reads rows from a table. - SCALAR (2): - Denotes a Scalar node in the expression tree. - Scalar nodes represent non-iterable entities in - the query plan. For example, constants or - arithmetic operators appearing inside predicate - expressions or references to column names. - """ - KIND_UNSPECIFIED = 0 - RELATIONAL = 1 - SCALAR = 2 - - class ChildLink(proto.Message): - r"""Metadata associated with a parent-child relationship appearing in a - [PlanNode][google.spanner.v1.PlanNode]. - - Attributes: - child_index (int): - The node to which the link points. - type_ (str): - The type of the link. For example, in Hash - Joins this could be used to distinguish between - the build child and the probe child, or in the - case of the child being an output variable, to - represent the tag associated with the output - variable. - variable (str): - Only present if the child node is - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] and - corresponds to an output variable of the parent node. The - field carries the name of the output variable. 
For example, - a ``TableScan`` operator that reads rows from a table will - have child links to the ``SCALAR`` nodes representing the - output variables created for each column that is read by the - operator. The corresponding ``variable`` fields will be set - to the variable names assigned to the columns. - """ - - child_index: int = proto.Field( - proto.INT32, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - variable: str = proto.Field( - proto.STRING, - number=3, - ) - - class ShortRepresentation(proto.Message): - r"""Condensed representation of a node and its subtree. Only present for - ``SCALAR`` [PlanNode(s)][google.spanner.v1.PlanNode]. - - Attributes: - description (str): - A string representation of the expression - subtree rooted at this node. - subqueries (MutableMapping[str, int]): - A mapping of (subquery variable name) -> (subquery node id) - for cases where the ``description`` string of this node - references a ``SCALAR`` subquery contained in the expression - subtree rooted at this node. The referenced ``SCALAR`` - subquery may not necessarily be a direct child of this node. - """ - - description: str = proto.Field( - proto.STRING, - number=1, - ) - subqueries: MutableMapping[str, int] = proto.MapField( - proto.STRING, - proto.INT32, - number=2, - ) - - index: int = proto.Field( - proto.INT32, - number=1, - ) - kind: Kind = proto.Field( - proto.ENUM, - number=2, - enum=Kind, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - child_links: MutableSequence[ChildLink] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=ChildLink, - ) - short_representation: ShortRepresentation = proto.Field( - proto.MESSAGE, - number=5, - message=ShortRepresentation, - ) - metadata: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=6, - message=struct_pb2.Struct, - ) - execution_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=7, - message=struct_pb2.Struct, - ) - - -class QueryPlan(proto.Message): - r"""Contains an ordered list of nodes appearing in the query - plan. - - Attributes: - plan_nodes (MutableSequence[google.cloud.spanner_v1.types.PlanNode]): - The nodes in the query plan. Plan nodes are returned in - pre-order starting with the plan root. Each - [PlanNode][google.spanner.v1.PlanNode]'s ``id`` corresponds - to its index in ``plan_nodes``. - """ - - plan_nodes: MutableSequence['PlanNode'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PlanNode', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py deleted file mode 100644 index 9503d0c172..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py +++ /dev/null @@ -1,379 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
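Since ``PlanNode.index`` equals the node's position in ``QueryPlan.plan_nodes`` (returned in pre-order, root first), the tree can be walked by resolving ``ChildLink.child_index`` with plain list indexing. A small sketch, assuming ``plan`` is a ``QueryPlan`` obtained from a profiled query::

    from google.cloud import spanner_v1

    def dump(plan: spanner_v1.QueryPlan, index: int = 0, depth: int = 0) -> None:
        """Print the plan tree, resolving child links by list position."""
        node = plan.plan_nodes[index]
        label = node.display_name
        if node.kind == spanner_v1.PlanNode.Kind.SCALAR:
            # SCALAR nodes carry a condensed description of their subtree.
            label += f" [{node.short_representation.description}]"
        print("  " * depth + label)
        for link in node.child_links:
            dump(plan, link.child_index, depth + 1)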
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import query_plan as gs_query_plan -from google.cloud.spanner_v1.types import transaction as gs_transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'ResultSet', - 'PartialResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - }, -) - - -class ResultSet(proto.Message): - r"""Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Attributes: - metadata (google.cloud.spanner_v1.types.ResultSetMetadata): - Metadata about the result set, such as row - type information. - rows (MutableSequence[google.protobuf.struct_pb2.ListValue]): - Each element in ``rows`` is a row whose format is defined by - [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. - The ith element in each row matches the ith field in - [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. - Elements are encoded based on type as described - [here][google.spanner.v1.TypeCode]. - stats (google.cloud.spanner_v1.types.ResultSetStats): - Query plan and execution statistics for the SQL statement - that produced this result set. These can be requested by - setting - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - DML statements always produce stats containing the number of - rows modified, unless executed using the - [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - Other fields might or might not be populated, based on the - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. A precommit token is included if the read-write - transaction is on a multiplexed session. Pass the precommit - token with the highest sequence number from this transaction - attempt to the [Commit][google.spanner.v1.Spanner.Commit] - request for this transaction. - """ - - metadata: 'ResultSetMetadata' = proto.Field( - proto.MESSAGE, - number=1, - message='ResultSetMetadata', - ) - rows: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct_pb2.ListValue, - ) - stats: 'ResultSetStats' = proto.Field( - proto.MESSAGE, - number=3, - message='ResultSetStats', - ) - precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=5, - message=gs_transaction.MultiplexedSessionPrecommitToken, - ) - - -class PartialResultSet(proto.Message): - r"""Partial results from a streaming read or SQL query. Streaming - reads and SQL queries better tolerate large result sets, large - rows, and large values, but are a little trickier to consume. - - Attributes: - metadata (google.cloud.spanner_v1.types.ResultSetMetadata): - Metadata about the result set, such as row - type information. Only present in the first - response. - values (MutableSequence[google.protobuf.struct_pb2.Value]): - A streamed result set consists of a stream of values, which - might be split into many ``PartialResultSet`` messages to - accommodate large rows and/or large values. 
Every N complete
-            values defines a row, where N is equal to the number of
-            entries in
-            [metadata.row_type.fields][google.spanner.v1.StructType.fields].
-
-            Most values are encoded based on type as described
-            [here][google.spanner.v1.TypeCode].
-
-            It's possible that the last value in values is "chunked",
-            meaning that the rest of the value is sent in subsequent
-            ``PartialResultSet``\ (s). This is denoted by the
-            [chunked_value][google.spanner.v1.PartialResultSet.chunked_value]
-            field. Two or more chunked values can be merged to form a
-            complete value as follows:
-
-            -  ``bool/number/null``: can't be chunked
-            -  ``string``: concatenate the strings
-            -  ``list``: concatenate the lists. If the last element in a
-               list is a ``string``, ``list``, or ``object``, merge it
-               with the first element in the next list by applying these
-               rules recursively.
-            -  ``object``: concatenate the (field name, field value)
-               pairs. If a field name is duplicated, then apply these
-               rules recursively to merge the field values.
-
-            Some examples of merging:
-
-            ::
-
-                Strings are concatenated.
-                "foo", "bar" => "foobar"
-
-                Lists of non-strings are concatenated.
-                [2, 3], [4] => [2, 3, 4]
-
-                Lists are concatenated, but the last and first elements are merged
-                because they are strings.
-                ["a", "b"], ["c", "d"] => ["a", "bc", "d"]
-
-                Lists are concatenated, but the last and first elements are merged
-                because they are lists. Recursively, the last and first elements
-                of the inner lists are merged because they are strings.
-                ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"]
-
-                Non-overlapping object fields are combined.
-                {"a": "1"}, {"b": "2"} => {"a": "1", "b": "2"}
-
-                Overlapping object fields are merged.
-                {"a": "1"}, {"a": "2"} => {"a": "12"}
-
-                Examples of merging objects containing lists of strings.
-                {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]}
-
-            For a more complete example, suppose a streaming SQL query
-            is yielding a result set whose rows contain a single string
-            field. The following ``PartialResultSet``\ s might be
-            yielded:
-
-            ::
-
-                {
-                  "metadata": { ... }
-                  "values": ["Hello", "W"]
-                  "chunked_value": true
-                  "resume_token": "Af65..."
-                }
-                {
-                  "values": ["orl"]
-                  "chunked_value": true
-                }
-                {
-                  "values": ["d"]
-                  "resume_token": "Zx1B..."
-                }
-
-            This sequence of ``PartialResultSet``\ s encodes two rows,
-            one containing the field value ``"Hello"``, and a second
-            containing the field value ``"World" = "W" + "orl" + "d"``.
-
-            Not all ``PartialResultSet``\ s contain a ``resume_token``.
-            Execution can only be resumed from a previously yielded
-            ``resume_token``. For the above sequence of
-            ``PartialResultSet``\ s, resuming the query with
-            ``"resume_token": "Af65..."`` yields results from the
-            ``PartialResultSet`` with value "orl".
-        chunked_value (bool):
-            If true, then the final value in
-            [values][google.spanner.v1.PartialResultSet.values] is
-            chunked, and must be combined with more values from
-            subsequent ``PartialResultSet``\ s to obtain a complete
-            field value.
-        resume_token (bytes):
-            Streaming calls might be interrupted for a variety of
-            reasons, such as TCP connection loss. If this occurs, the
-            stream of results can be resumed by re-sending the original
-            request and including ``resume_token``. Note that executing
-            any other transaction in the same session invalidates the
-            token.
-        stats (google.cloud.spanner_v1.types.ResultSetStats):
-            Query plan and execution statistics for the statement that
-            produced this streaming result set.
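The chunk-merge rules spelled out above are mechanical enough to sketch in a few lines of plain Python. This covers only the ``string`` and ``list`` cases from the examples (the ``object``/``Struct`` case merges analogously) and operates on already-decoded Python values, not raw protos::

    def merge_chunks(left, right):
        """Merge two adjacent chunked values per the rules above."""
        if isinstance(left, str) and isinstance(right, str):
            return left + right                  # "foo", "bar" => "foobar"
        if isinstance(left, list) and isinstance(right, list):
            # Concatenate, merging the seam recursively when the boundary
            # elements are themselves strings or lists.
            if (left and right
                    and isinstance(left[-1], (str, list))
                    and isinstance(right[0], (str, list))):
                return left[:-1] + [merge_chunks(left[-1], right[0])] + right[1:]
            return left + right                  # [2, 3], [4] => [2, 3, 4]
        raise TypeError("bool/number/null values are never chunked")

    assert merge_chunks(["a", "b"], ["c", "d"]) == ["a", "bc", "d"]
    assert merge_chunks(["a", ["b", "c"]], [["d"], "e"]) == ["a", ["b", "cd"], "e"]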
These can be requested - by setting - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] - and are sent only once with the last response in the stream. - This field is also present in the last response for DML - statements. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. A precommit token is included if the read-write - transaction has multiplexed sessions enabled. Pass the - precommit token with the highest sequence number from this - transaction attempt to the - [Commit][google.spanner.v1.Spanner.Commit] request for this - transaction. - last (bool): - Optional. Indicates whether this is the last - ``PartialResultSet`` in the stream. The server might - optionally set this field. Clients shouldn't rely on this - field being set in all cases. - """ - - metadata: 'ResultSetMetadata' = proto.Field( - proto.MESSAGE, - number=1, - message='ResultSetMetadata', - ) - values: MutableSequence[struct_pb2.Value] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct_pb2.Value, - ) - chunked_value: bool = proto.Field( - proto.BOOL, - number=3, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=4, - ) - stats: 'ResultSetStats' = proto.Field( - proto.MESSAGE, - number=5, - message='ResultSetStats', - ) - precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=8, - message=gs_transaction.MultiplexedSessionPrecommitToken, - ) - last: bool = proto.Field( - proto.BOOL, - number=9, - ) - - -class ResultSetMetadata(proto.Message): - r"""Metadata about a [ResultSet][google.spanner.v1.ResultSet] or - [PartialResultSet][google.spanner.v1.PartialResultSet]. - - Attributes: - row_type (google.cloud.spanner_v1.types.StructType): - Indicates the field names and types for the rows in the - result set. For example, a SQL query like - ``"SELECT UserId, UserName FROM Users"`` could return a - ``row_type`` value like: - - :: - - "fields": [ - { "name": "UserId", "type": { "code": "INT64" } }, - { "name": "UserName", "type": { "code": "STRING" } }, - ] - transaction (google.cloud.spanner_v1.types.Transaction): - If the read or SQL query began a transaction - as a side-effect, the information about the new - transaction is yielded here. - undeclared_parameters (google.cloud.spanner_v1.types.StructType): - A SQL query can be parameterized. In PLAN mode, these - parameters can be undeclared. This indicates the field names - and types for those undeclared parameters in the SQL query. - For example, a SQL query like - ``"SELECT * FROM Users where UserId = @userId and UserName = @userName "`` - could return a ``undeclared_parameters`` value like: - - :: - - "fields": [ - { "name": "UserId", "type": { "code": "INT64" } }, - { "name": "UserName", "type": { "code": "STRING" } }, - ] - """ - - row_type: gs_type.StructType = proto.Field( - proto.MESSAGE, - number=1, - message=gs_type.StructType, - ) - transaction: gs_transaction.Transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.Transaction, - ) - undeclared_parameters: gs_type.StructType = proto.Field( - proto.MESSAGE, - number=3, - message=gs_type.StructType, - ) - - -class ResultSetStats(proto.Message): - r"""Additional statistics about a - [ResultSet][google.spanner.v1.ResultSet] or - [PartialResultSet][google.spanner.v1.PartialResultSet]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_plan (google.cloud.spanner_v1.types.QueryPlan): - [QueryPlan][google.spanner.v1.QueryPlan] for the query - associated with this result. - query_stats (google.protobuf.struct_pb2.Struct): - Aggregated statistics from the execution of the query. Only - present when the query is profiled. For example, a query - could return the statistics as follows: - - :: - - { - "rows_returned": "3", - "elapsed_time": "1.22 secs", - "cpu_time": "1.19 secs" - } - row_count_exact (int): - Standard DML returns an exact count of rows - that were modified. - - This field is a member of `oneof`_ ``row_count``. - row_count_lower_bound (int): - Partitioned DML doesn't offer exactly-once - semantics, so it returns a lower bound of the - rows modified. - - This field is a member of `oneof`_ ``row_count``. - """ - - query_plan: gs_query_plan.QueryPlan = proto.Field( - proto.MESSAGE, - number=1, - message=gs_query_plan.QueryPlan, - ) - query_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - row_count_exact: int = proto.Field( - proto.INT64, - number=3, - oneof='row_count', - ) - row_count_lower_bound: int = proto.Field( - proto.INT64, - number=4, - oneof='row_count', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py deleted file mode 100644 index 2742de8269..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py +++ /dev/null @@ -1,1782 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
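Because ``row_count`` is a ``oneof``, a consumer should check which arm is set rather than read both fields. A hedged sketch using proto-plus's ``pb()`` accessor to reach the underlying protobuf's ``WhichOneof``; ``stats`` is assumed to be a ``ResultSetStats`` taken from an executed DML statement::

    from google.cloud import spanner_v1

    def rows_modified(stats: spanner_v1.ResultSetStats):
        """Return (count, is_exact) for a DML ResultSetStats."""
        which = spanner_v1.ResultSetStats.pb(stats).WhichOneof("row_count")
        if which == "row_count_exact":
            return stats.row_count_exact, True         # standard DML
        if which == "row_count_lower_bound":
            return stats.row_count_lower_bound, False  # partitioned DML
        return 0, True  # plain queries report no row count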
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import keys -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import transaction as gs_transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'CreateSessionRequest', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 'Session', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'DeleteSessionRequest', - 'RequestOptions', - 'DirectedReadOptions', - 'ExecuteSqlRequest', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'Partition', - 'PartitionResponse', - 'ReadRequest', - 'BeginTransactionRequest', - 'CommitRequest', - 'RollbackRequest', - 'BatchWriteRequest', - 'BatchWriteResponse', - }, -) - - -class CreateSessionRequest(proto.Message): - r"""The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - - Attributes: - database (str): - Required. The database in which the new - session is created. - session (google.cloud.spanner_v1.types.Session): - Required. The session to create. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - session: 'Session' = proto.Field( - proto.MESSAGE, - number=2, - message='Session', - ) - - -class BatchCreateSessionsRequest(proto.Message): - r"""The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - Attributes: - database (str): - Required. The database in which the new - sessions are created. - session_template (google.cloud.spanner_v1.types.Session): - Parameters to apply to each created session. - session_count (int): - Required. The number of sessions to be created in this batch - call. The API can return fewer than the requested number of - sessions. If a specific number of sessions are desired, the - client can make additional calls to ``BatchCreateSessions`` - (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - session_template: 'Session' = proto.Field( - proto.MESSAGE, - number=2, - message='Session', - ) - session_count: int = proto.Field( - proto.INT32, - number=3, - ) - - -class BatchCreateSessionsResponse(proto.Message): - r"""The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - Attributes: - session (MutableSequence[google.cloud.spanner_v1.types.Session]): - The freshly created sessions. - """ - - session: MutableSequence['Session'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Session', - ) - - -class Session(proto.Message): - r"""A session in the Cloud Spanner API. - - Attributes: - name (str): - Output only. The name of the session. This is - always system-assigned. - labels (MutableMapping[str, str]): - The labels for the session. - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. 
- - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 64 labels can be associated with a given - session. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp when the session - is created. - approximate_last_use_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The approximate timestamp when - the session is last used. It's typically earlier - than the actual last use time. - creator_role (str): - The database role which created this session. - multiplexed (bool): - Optional. If ``true``, specifies a multiplexed session. Use - a multiplexed session for multiple, concurrent read-only - operations. Don't use them for read-write transactions, - partitioned reads, or partitioned queries. Use - [``sessions.create``][google.spanner.v1.Spanner.CreateSession] - to create multiplexed sessions. Don't use - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions] - to create a multiplexed session. You can't delete or list - multiplexed sessions. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - approximate_last_use_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - creator_role: str = proto.Field( - proto.STRING, - number=5, - ) - multiplexed: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class GetSessionRequest(proto.Message): - r"""The request for [GetSession][google.spanner.v1.Spanner.GetSession]. - - Attributes: - name (str): - Required. The name of the session to - retrieve. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSessionsRequest(proto.Message): - r"""The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Attributes: - database (str): - Required. The database in which to list - sessions. - page_size (int): - Number of sessions to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.v1.ListSessionsResponse.next_page_token] - from a previous - [ListSessionsResponse][google.spanner.v1.ListSessionsResponse]. - filter (str): - An expression for filtering the results of the request. - Filter rules are case insensitive. The fields eligible for - filtering are: - - - ``labels.key`` where key is the name of a label - - Some examples of using filters are: - - - ``labels.env:*`` --> The session has the label "env". - - ``labels.env:dev`` --> The session has the label "env" and - the value of the label contains the string "dev". - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSessionsResponse(proto.Message): - r"""The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Attributes: - sessions (MutableSequence[google.cloud.spanner_v1.types.Session]): - The list of requested sessions. 
- next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListSessions][google.spanner.v1.Spanner.ListSessions] call - to fetch more of the matching sessions. - """ - - @property - def raw_page(self): - return self - - sessions: MutableSequence['Session'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Session', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteSessionRequest(proto.Message): - r"""The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - - Attributes: - name (str): - Required. The name of the session to delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RequestOptions(proto.Message): - r"""Common request options for various APIs. - - Attributes: - priority (google.cloud.spanner_v1.types.RequestOptions.Priority): - Priority for the request. - request_tag (str): - A per-request tag which can be applied to queries or reads, - used for statistics collection. Both ``request_tag`` and - ``transaction_tag`` can be specified for a read or query - that belongs to a transaction. This field is ignored for - requests where it's not applicable (for example, - ``CommitRequest``). Legal characters for ``request_tag`` - values are all printable characters (ASCII 32 - 126) and the - length of a request_tag is limited to 50 characters. Values - that exceed this limit are truncated. Any leading underscore - (\_) characters are removed from the string. - transaction_tag (str): - A tag used for statistics collection about this transaction. - Both ``request_tag`` and ``transaction_tag`` can be - specified for a read or query that belongs to a transaction. - The value of transaction_tag should be the same for all - requests belonging to the same transaction. If this request - doesn't belong to any transaction, ``transaction_tag`` is - ignored. Legal characters for ``transaction_tag`` values are - all printable characters (ASCII 32 - 126) and the length of - a ``transaction_tag`` is limited to 50 characters. Values - that exceed this limit are truncated. Any leading underscore - (\_) characters are removed from the string. - """ - class Priority(proto.Enum): - r"""The relative priority for requests. Note that priority isn't - applicable for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - - The priority acts as a hint to the Cloud Spanner scheduler and - doesn't guarantee priority or order of execution. For example: - - - Some parts of a write operation always execute at - ``PRIORITY_HIGH``, regardless of the specified priority. This can - cause you to see an increase in high priority workload even when - executing a low priority request. This can also potentially cause - a priority inversion where a lower priority request is fulfilled - ahead of a higher priority request. - - If a transaction contains multiple operations with different - priorities, Cloud Spanner doesn't guarantee to process the higher - priority operations first. There might be other constraints to - satisfy, such as the order of operations. - - Values: - PRIORITY_UNSPECIFIED (0): - ``PRIORITY_UNSPECIFIED`` is equivalent to ``PRIORITY_HIGH``. - PRIORITY_LOW (1): - This specifies that the request is low - priority. - PRIORITY_MEDIUM (2): - This specifies that the request is medium - priority. - PRIORITY_HIGH (3): - This specifies that the request is high - priority. 
- """ - PRIORITY_UNSPECIFIED = 0 - PRIORITY_LOW = 1 - PRIORITY_MEDIUM = 2 - PRIORITY_HIGH = 3 - - priority: Priority = proto.Field( - proto.ENUM, - number=1, - enum=Priority, - ) - request_tag: str = proto.Field( - proto.STRING, - number=2, - ) - transaction_tag: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DirectedReadOptions(proto.Message): - r"""The ``DirectedReadOptions`` can be used to indicate which replicas - or regions should be used for non-transactional reads or queries. - - ``DirectedReadOptions`` can only be specified for a read-only - transaction, otherwise the API returns an ``INVALID_ARGUMENT`` - error. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - include_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.IncludeReplicas): - ``Include_replicas`` indicates the order of replicas (as - they appear in this list) to process the request. If - ``auto_failover_disabled`` is set to ``true`` and all - replicas are exhausted without finding a healthy replica, - Spanner waits for a replica in the list to become available, - requests might fail due to ``DEADLINE_EXCEEDED`` errors. - - This field is a member of `oneof`_ ``replicas``. - exclude_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.ExcludeReplicas): - ``Exclude_replicas`` indicates that specified replicas - should be excluded from serving requests. Spanner doesn't - route requests to the replicas in this list. - - This field is a member of `oneof`_ ``replicas``. - """ - - class ReplicaSelection(proto.Message): - r"""The directed read replica selector. Callers must provide one or more - of the following fields for replica selection: - - - ``location`` - The location must be one of the regions within the - multi-region configuration of your database. - - ``type`` - The type of the replica. - - Some examples of using replica_selectors are: - - - ``location:us-east1`` --> The "us-east1" replica(s) of any - available type is used to process the request. - - ``type:READ_ONLY`` --> The "READ_ONLY" type replica(s) in the - nearest available location are used to process the request. - - ``location:us-east1 type:READ_ONLY`` --> The "READ_ONLY" type - replica(s) in location "us-east1" is used to process the request. - - Attributes: - location (str): - The location or region of the serving - requests, for example, "us-east1". - type_ (google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection.Type): - The type of replica. - """ - class Type(proto.Enum): - r"""Indicates the type of replica. - - Values: - TYPE_UNSPECIFIED (0): - Not specified. - READ_WRITE (1): - Read-write replicas support both reads and - writes. - READ_ONLY (2): - Read-only replicas only support reads (not - writes). - """ - TYPE_UNSPECIFIED = 0 - READ_WRITE = 1 - READ_ONLY = 2 - - location: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DirectedReadOptions.ReplicaSelection.Type' = proto.Field( - proto.ENUM, - number=2, - enum='DirectedReadOptions.ReplicaSelection.Type', - ) - - class IncludeReplicas(proto.Message): - r"""An ``IncludeReplicas`` contains a repeated set of - ``ReplicaSelection`` which indicates the order in which replicas - should be considered. 
-
-        Attributes:
-            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
-                The directed read replica selector.
-            auto_failover_disabled (bool):
-                If ``true``, Spanner doesn't route requests to a replica
-                outside the ``include_replicas`` list when all of the
-                specified replicas are unavailable or unhealthy. Default
-                value is ``false``.
-        """
-
-        replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField(
-            proto.MESSAGE,
-            number=1,
-            message='DirectedReadOptions.ReplicaSelection',
-        )
-        auto_failover_disabled: bool = proto.Field(
-            proto.BOOL,
-            number=2,
-        )
-
-    class ExcludeReplicas(proto.Message):
-        r"""An ExcludeReplicas contains a repeated set of
-        ReplicaSelection that should be excluded from serving requests.
-
-        Attributes:
-            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
-                The directed read replica selector.
-        """
-
-        replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField(
-            proto.MESSAGE,
-            number=1,
-            message='DirectedReadOptions.ReplicaSelection',
-        )
-
-    include_replicas: IncludeReplicas = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        oneof='replicas',
-        message=IncludeReplicas,
-    )
-    exclude_replicas: ExcludeReplicas = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        oneof='replicas',
-        message=ExcludeReplicas,
-    )
-
-
-class ExecuteSqlRequest(proto.Message):
-    r"""The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]
-    and
-    [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
-
-    Attributes:
-        session (str):
-            Required. The session in which the SQL query
-            should be performed.
-        transaction (google.cloud.spanner_v1.types.TransactionSelector):
-            The transaction to use.
-
-            For queries, if none is provided, the default is
-            a temporary read-only transaction with strong
-            concurrency.
-
-            Standard DML statements require a read-write
-            transaction. To protect against replays,
-            single-use transactions are not supported. The
-            caller must either supply an existing
-            transaction ID or begin a new transaction.
-
-            Partitioned DML requires an existing Partitioned
-            DML transaction ID.
-        sql (str):
-            Required. The SQL string.
-        params (google.protobuf.struct_pb2.Struct):
-            Parameter names and values that bind to placeholders in the
-            SQL string.
-
-            A parameter placeholder consists of the ``@`` character
-            followed by the parameter name (for example,
-            ``@firstName``). Parameter names must conform to the naming
-            requirements of identifiers as specified at
-            https://cloud.google.com/spanner/docs/lexical#identifiers.
-
-            Parameters can appear anywhere that a literal value is
-            expected. The same parameter name can be used more than
-            once, for example:
-
-            ``"WHERE id > @msg_id AND id < @msg_id + 100"``
-
-            It's an error to execute a SQL statement with unbound
-            parameters.
-        param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]):
-            It isn't always possible for Cloud Spanner to infer the
-            right SQL type from a JSON value. For example, values of
-            type ``BYTES`` and values of type ``STRING`` both appear in
-            [params][google.spanner.v1.ExecuteSqlRequest.params] as JSON
-            strings.
-
-            In these cases, you can use ``param_types`` to specify the
-            exact SQL type for some or all of the SQL statement
-            parameters. See the definition of
-            [Type][google.spanner.v1.Type] for more information about
-            SQL types.
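Assembling the replica-selection pieces above into a ``DirectedReadOptions`` that prefers read-only replicas in one region (the region is only an example; the default keeps automatic failover enabled)::

    from google.cloud import spanner_v1

    directed_read = spanner_v1.DirectedReadOptions(
        include_replicas=spanner_v1.DirectedReadOptions.IncludeReplicas(
            replica_selections=[
                spanner_v1.DirectedReadOptions.ReplicaSelection(
                    location="us-east1",
                    type_=spanner_v1.DirectedReadOptions.ReplicaSelection.Type.READ_ONLY,
                )
            ],
            auto_failover_disabled=False,  # default: fall back to other replicas
        ),
    )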
- resume_token (bytes): - If this request is resuming a previously interrupted SQL - statement execution, ``resume_token`` should be copied from - the last - [PartialResultSet][google.spanner.v1.PartialResultSet] - yielded before the interruption. Doing this enables the new - SQL statement execution to resume where the last one left - off. The rest of the request parameters must exactly match - the request that yielded this token. - query_mode (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode): - Used to control the amount of debugging information returned - in [ResultSetStats][google.spanner.v1.ResultSetStats]. If - [partition_token][google.spanner.v1.ExecuteSqlRequest.partition_token] - is set, - [query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] - can only be set to - [QueryMode.NORMAL][google.spanner.v1.ExecuteSqlRequest.QueryMode.NORMAL]. - partition_token (bytes): - If present, results are restricted to the specified - partition previously created using ``PartitionQuery``. There - must be an exact match for the values of fields common to - this message and the ``PartitionQueryRequest`` message used - to create this ``partition_token``. - seqno (int): - A per-transaction sequence number used to - identify this request. This field makes each - request idempotent such that if the request is - received multiple times, at most one succeeds. - - The sequence number must be monotonically - increasing within the transaction. If a request - arrives for the first time with an out-of-order - sequence number, the transaction can be aborted. - Replays of previously handled requests yield the - same response as the first execution. - - Required for DML statements. Ignored for - queries. - query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): - Query optimizer configuration to use for the - given query. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions): - Directed read options for this request. - data_boost_enabled (bool): - If this is for a partitioned query and this field is set to - ``true``, the request is executed with Spanner Data Boost - independent compute resources. - - If the field is set to ``true`` but the request doesn't set - ``partition_token``, the API returns an ``INVALID_ARGUMENT`` - error. - last_statement (bool): - Optional. If set to ``true``, this statement marks the end - of the transaction. After this statement executes, you must - commit or abort the transaction. Attempts to execute any - other requests against this transaction (including reads and - queries) are rejected. - - For DML statements, setting this option might cause some - error reporting to be deferred until commit time (for - example, validation of unique constraints). Given this, - successful execution of a DML statement shouldn't be assumed - until a subsequent ``Commit`` call completes successfully. - """ - class QueryMode(proto.Enum): - r"""Mode in which the statement must be processed. - - Values: - NORMAL (0): - The default mode. Only the statement results - are returned. - PLAN (1): - This mode returns only the query plan, - without any results or execution statistics - information. - PROFILE (2): - This mode returns the query plan, overall - execution statistics, operator level execution - statistics along with the results. This has a - performance overhead compared to the other - modes. 
It isn't recommended to use this mode for - production traffic. - WITH_STATS (3): - This mode returns the overall (but not - operator-level) execution statistics along with - the results. - WITH_PLAN_AND_STATS (4): - This mode returns the query plan, overall - (but not operator-level) execution statistics - along with the results. - """ - NORMAL = 0 - PLAN = 1 - PROFILE = 2 - WITH_STATS = 3 - WITH_PLAN_AND_STATS = 4 - - class QueryOptions(proto.Message): - r"""Query optimizer configuration. - - Attributes: - optimizer_version (str): - An option to control the selection of optimizer version. - - This parameter allows individual queries to pick different - query optimizer versions. - - Specifying ``latest`` as a value instructs Cloud Spanner to - use the latest supported query optimizer version. If not - specified, Cloud Spanner uses the optimizer version set at - the database level options. Any other positive integer (from - the list of supported optimizer versions) overrides the - default optimizer version for query execution. - - The list of supported optimizer versions can be queried from - ``SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS``. - - Executing a SQL statement with an invalid optimizer version - fails with an ``INVALID_ARGUMENT`` error. - - See - https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer - for more information on managing the query optimizer. - - The ``optimizer_version`` statement hint has precedence over - this setting. - optimizer_statistics_package (str): - An option to control the selection of optimizer statistics - package. - - This parameter allows individual queries to use a different - query optimizer statistics package. - - Specifying ``latest`` as a value instructs Cloud Spanner to - use the latest generated statistics package. If not - specified, Cloud Spanner uses the statistics package set at - the database level options, or the latest package if the - database option isn't set. - - The statistics package requested by the query has to be - exempt from garbage collection. This can be achieved with - the following DDL statement: - - .. code:: sql - - ALTER STATISTICS SET OPTIONS (allow_gc=false) - - The list of available statistics packages can be queried - from ``INFORMATION_SCHEMA.SPANNER_STATISTICS``. - - Executing a SQL statement with an invalid optimizer - statistics package or with a statistics package that allows - garbage collection fails with an ``INVALID_ARGUMENT`` error. 
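Pulling the ``ExecuteSqlRequest`` fields together: parameters bind by ``@name``, ``param_types`` pins encodings that JSON can't distinguish, and the optimizer options just documented slot in under ``query_options``. A sketch with placeholder session and data — it assumes proto-plus's dict-to-``Struct`` coercion for ``params``, and ``PROFILE`` mode is shown only because it returns the stats discussed earlier; it isn't meant for production traffic::

    from google.cloud import spanner_v1

    request = spanner_v1.ExecuteSqlRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
        sql="SELECT UserId, UserName FROM Users WHERE UserId > @msg_id",
        params={"msg_id": "1000"},  # INT64 binds as a decimal string in JSON
        param_types={
            "msg_id": spanner_v1.Type(code=spanner_v1.TypeCode.INT64),
        },
        query_mode=spanner_v1.ExecuteSqlRequest.QueryMode.PROFILE,
        query_options=spanner_v1.ExecuteSqlRequest.QueryOptions(
            optimizer_version="latest",
        ),
    )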
- """ - - optimizer_version: str = proto.Field( - proto.STRING, - number=1, - ) - optimizer_statistics_package: str = proto.Field( - proto.STRING, - number=2, - ) - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - sql: str = proto.Field( - proto.STRING, - number=3, - ) - params: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=4, - message=struct_pb2.Struct, - ) - param_types: MutableMapping[str, gs_type.Type] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=5, - message=gs_type.Type, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=6, - ) - query_mode: QueryMode = proto.Field( - proto.ENUM, - number=7, - enum=QueryMode, - ) - partition_token: bytes = proto.Field( - proto.BYTES, - number=8, - ) - seqno: int = proto.Field( - proto.INT64, - number=9, - ) - query_options: QueryOptions = proto.Field( - proto.MESSAGE, - number=10, - message=QueryOptions, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=11, - message='RequestOptions', - ) - directed_read_options: 'DirectedReadOptions' = proto.Field( - proto.MESSAGE, - number=15, - message='DirectedReadOptions', - ) - data_boost_enabled: bool = proto.Field( - proto.BOOL, - number=16, - ) - last_statement: bool = proto.Field( - proto.BOOL, - number=17, - ) - - -class ExecuteBatchDmlRequest(proto.Message): - r"""The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - - Attributes: - session (str): - Required. The session in which the DML - statements should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - Required. The transaction to use. Must be a - read-write transaction. - To protect against replays, single-use - transactions are not supported. The caller must - either supply an existing transaction ID or - begin a new transaction. - statements (MutableSequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement]): - Required. The list of statements to execute in this batch. - Statements are executed serially, such that the effects of - statement ``i`` are visible to statement ``i+1``. Each - statement must be a DML statement. Execution stops at the - first failed statement; the remaining statements are not - executed. - - Callers must provide at least one statement. - seqno (int): - Required. A per-transaction sequence number - used to identify this request. This field makes - each request idempotent such that if the request - is received multiple times, at most one - succeeds. - - The sequence number must be monotonically - increasing within the transaction. If a request - arrives for the first time with an out-of-order - sequence number, the transaction might be - aborted. Replays of previously handled requests - yield the same response as the first execution. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - last_statements (bool): - Optional. If set to ``true``, this request marks the end of - the transaction. After these statements execute, you must - commit or abort the transaction. Attempts to execute any - other requests against this transaction (including reads and - queries) are rejected. - - Setting this option might cause some error reporting to be - deferred until commit time (for example, validation of - unique constraints). 
Given this, successful execution of - statements shouldn't be assumed until a subsequent - ``Commit`` call completes successfully. - """ - - class Statement(proto.Message): - r"""A single DML statement. - - Attributes: - sql (str): - Required. The DML string. - params (google.protobuf.struct_pb2.Struct): - Parameter names and values that bind to placeholders in the - DML string. - - A parameter placeholder consists of the ``@`` character - followed by the parameter name (for example, - ``@firstName``). Parameter names can contain letters, - numbers, and underscores. - - Parameters can appear anywhere that a literal value is - expected. The same parameter name can be used more than - once, for example: - - ``"WHERE id > @msg_id AND id < @msg_id + 100"`` - - It's an error to execute a SQL statement with unbound - parameters. - param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]): - It isn't always possible for Cloud Spanner to infer the - right SQL type from a JSON value. For example, values of - type ``BYTES`` and values of type ``STRING`` both appear in - [params][google.spanner.v1.ExecuteBatchDmlRequest.Statement.params] - as JSON strings. - - In these cases, ``param_types`` can be used to specify the - exact SQL type for some or all of the SQL statement - parameters. See the definition of - [Type][google.spanner.v1.Type] for more information about - SQL types. - """ - - sql: str = proto.Field( - proto.STRING, - number=1, - ) - params: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - param_types: MutableMapping[str, gs_type.Type] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message=gs_type.Type, - ) - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - statements: MutableSequence[Statement] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Statement, - ) - seqno: int = proto.Field( - proto.INT64, - number=4, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=5, - message='RequestOptions', - ) - last_statements: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class ExecuteBatchDmlResponse(proto.Message): - r"""The response for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - Contains a list of [ResultSet][google.spanner.v1.ResultSet] - messages, one for each DML statement that has successfully executed, - in the same order as the statements in the request. If a statement - fails, the status in the response body identifies the cause of the - failure. - - To check for DML statements that failed, use the following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value ``OK`` indicates - that all statements were executed successfully. - 2. If the status was not ``OK``, check the number of result sets in - the response. If the response contains ``N`` - [ResultSet][google.spanner.v1.ResultSet] messages, then statement - ``N+1`` in the request failed. - - Example 1: - - - Request: 5 DML statements, all executed successfully. - - Response: 5 [ResultSet][google.spanner.v1.ResultSet] messages, - with the status ``OK``. - - Example 2: - - - Request: 5 DML statements. The third statement has a syntax error. - - Response: 2 [ResultSet][google.spanner.v1.ResultSet] messages, and - a syntax error (``INVALID_ARGUMENT``) status. 
The number of
- [ResultSet][google.spanner.v1.ResultSet] messages indicates that
- the third statement failed, and the fourth and fifth statements
- were not executed.
-
- Attributes:
- result_sets (MutableSequence[google.cloud.spanner_v1.types.ResultSet]):
- One [ResultSet][google.spanner.v1.ResultSet] for each
- statement in the request that ran successfully, in the same
- order as the statements in the request. Each
- [ResultSet][google.spanner.v1.ResultSet] does not contain
- any rows. The
- [ResultSetStats][google.spanner.v1.ResultSetStats] in each
- [ResultSet][google.spanner.v1.ResultSet] contain the number
- of rows modified by the statement.
-
- Only the first [ResultSet][google.spanner.v1.ResultSet] in
- the response contains valid
- [ResultSetMetadata][google.spanner.v1.ResultSetMetadata].
- status (google.rpc.status_pb2.Status):
- If all DML statements are executed successfully, the status
- is ``OK``. Otherwise, the error status of the first failed
- statement.
- precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken):
- Optional. A precommit token is included if the read-write
- transaction is on a multiplexed session. The precommit
- token with the highest sequence number from this transaction
- attempt should be passed to the
- [Commit][google.spanner.v1.Spanner.Commit] request for this
- transaction.
- """
-
- result_sets: MutableSequence[result_set.ResultSet] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message=result_set.ResultSet,
- )
- status: status_pb2.Status = proto.Field(
- proto.MESSAGE,
- number=2,
- message=status_pb2.Status,
- )
- precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field(
- proto.MESSAGE,
- number=3,
- message=gs_transaction.MultiplexedSessionPrecommitToken,
- )
-
-
-class PartitionOptions(proto.Message):
- r"""Options for a ``PartitionQueryRequest`` and
- ``PartitionReadRequest``.
-
- Attributes:
- partition_size_bytes (int):
- **Note:** This hint is currently ignored by
- ``PartitionQuery`` and ``PartitionRead`` requests.
-
- The desired data size for each partition generated. The
- default for this option is currently 1 GiB. This is only a
- hint. The actual size of each partition can be smaller or
- larger than the requested size.
- max_partitions (int):
- **Note:** This hint is currently ignored by
- ``PartitionQuery`` and ``PartitionRead`` requests.
-
- The desired maximum number of partitions to return. For
- example, this might be set to the number of workers
- available. The default for this option is currently 10,000.
- The maximum value is currently 200,000. This is only a hint.
- The actual number of partitions returned can be smaller or
- larger than the requested maximum count.
- """
-
- partition_size_bytes: int = proto.Field(
- proto.INT64,
- number=1,
- )
- max_partitions: int = proto.Field(
- proto.INT64,
- number=2,
- )
-
-
-class PartitionQueryRequest(proto.Message):
- r"""The request for
- [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery]
-
- Attributes:
- session (str):
- Required. The session used to create the
- partitions.
- transaction (google.cloud.spanner_v1.types.TransactionSelector):
- Read-only snapshot transactions are
- supported; read-write and single-use
- transactions are not.
- sql (str):
- Required. The query request to generate partitions for. The
- request fails if the query isn't root partitionable. For a
- query to be root partitionable, it needs to satisfy a few
- conditions.
For example, if the query execution plan
- contains a distributed union operator, then it must be the
- first operator in the plan. For more information about other
- conditions, see `Read data in
- parallel `__.
-
- The query request must not contain DML commands, such as
- ``INSERT``, ``UPDATE``, or ``DELETE``. Use
- [``ExecuteStreamingSql``][google.spanner.v1.Spanner.ExecuteStreamingSql]
- with a ``PartitionedDml`` transaction for large,
- partition-friendly DML operations.
- params (google.protobuf.struct_pb2.Struct):
- Parameter names and values that bind to placeholders in the
- SQL string.
-
- A parameter placeholder consists of the ``@`` character
- followed by the parameter name (for example,
- ``@firstName``). Parameter names can contain letters,
- numbers, and underscores.
-
- Parameters can appear anywhere that a literal value is
- expected. The same parameter name can be used more than
- once, for example:
-
- ``"WHERE id > @msg_id AND id < @msg_id + 100"``
-
- It's an error to execute a SQL statement with unbound
- parameters.
- param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]):
- It isn't always possible for Cloud Spanner to infer the
- right SQL type from a JSON value. For example, values of
- type ``BYTES`` and values of type ``STRING`` both appear in
- [params][google.spanner.v1.PartitionQueryRequest.params] as
- JSON strings.
-
- In these cases, ``param_types`` can be used to specify the
- exact SQL type for some or all of the SQL query parameters.
- See the definition of [Type][google.spanner.v1.Type] for
- more information about SQL types.
- partition_options (google.cloud.spanner_v1.types.PartitionOptions):
- Additional options that affect how many
- partitions are created.
- """
-
- session: str = proto.Field(
- proto.STRING,
- number=1,
- )
- transaction: gs_transaction.TransactionSelector = proto.Field(
- proto.MESSAGE,
- number=2,
- message=gs_transaction.TransactionSelector,
- )
- sql: str = proto.Field(
- proto.STRING,
- number=3,
- )
- params: struct_pb2.Struct = proto.Field(
- proto.MESSAGE,
- number=4,
- message=struct_pb2.Struct,
- )
- param_types: MutableMapping[str, gs_type.Type] = proto.MapField(
- proto.STRING,
- proto.MESSAGE,
- number=5,
- message=gs_type.Type,
- )
- partition_options: 'PartitionOptions' = proto.Field(
- proto.MESSAGE,
- number=6,
- message='PartitionOptions',
- )
-
-
-class PartitionReadRequest(proto.Message):
- r"""The request for
- [PartitionRead][google.spanner.v1.Spanner.PartitionRead]
-
- Attributes:
- session (str):
- Required. The session used to create the
- partitions.
- transaction (google.cloud.spanner_v1.types.TransactionSelector):
- Read-only snapshot transactions are
- supported; read-write and single-use
- transactions are not.
- table (str):
- Required. The name of the table in the
- database to be read.
- index (str):
- If non-empty, the name of an index on
- [table][google.spanner.v1.PartitionReadRequest.table]. This
- index is used instead of the table primary key when
- interpreting
- [key_set][google.spanner.v1.PartitionReadRequest.key_set]
- and sorting result rows. See
- [key_set][google.spanner.v1.PartitionReadRequest.key_set]
- for further information.
- columns (MutableSequence[str]):
- The columns of
- [table][google.spanner.v1.PartitionReadRequest.table] to be
- returned for each row matching this request.
- key_set (google.cloud.spanner_v1.types.KeySet):
- Required. ``key_set`` identifies the rows to be yielded.
- ``key_set`` names the primary keys of the rows in - [table][google.spanner.v1.PartitionReadRequest.table] to be - yielded, unless - [index][google.spanner.v1.PartitionReadRequest.index] is - present. If - [index][google.spanner.v1.PartitionReadRequest.index] is - present, then - [key_set][google.spanner.v1.PartitionReadRequest.key_set] - instead names index keys in - [index][google.spanner.v1.PartitionReadRequest.index]. - - It isn't an error for the ``key_set`` to name rows that - don't exist in the database. Read yields nothing for - nonexistent rows. - partition_options (google.cloud.spanner_v1.types.PartitionOptions): - Additional options that affect how many - partitions are created. - """ - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - table: str = proto.Field( - proto.STRING, - number=3, - ) - index: str = proto.Field( - proto.STRING, - number=4, - ) - columns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - key_set: keys.KeySet = proto.Field( - proto.MESSAGE, - number=6, - message=keys.KeySet, - ) - partition_options: 'PartitionOptions' = proto.Field( - proto.MESSAGE, - number=9, - message='PartitionOptions', - ) - - -class Partition(proto.Message): - r"""Information returned for each partition returned in a - PartitionResponse. - - Attributes: - partition_token (bytes): - This token can be passed to ``Read``, ``StreamingRead``, - ``ExecuteSql``, or ``ExecuteStreamingSql`` requests to - restrict the results to those identified by this partition - token. - """ - - partition_token: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class PartitionResponse(proto.Message): - r"""The response for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - Attributes: - partitions (MutableSequence[google.cloud.spanner_v1.types.Partition]): - Partitions created by this request. - transaction (google.cloud.spanner_v1.types.Transaction): - Transaction created by this request. - """ - - partitions: MutableSequence['Partition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Partition', - ) - transaction: gs_transaction.Transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.Transaction, - ) - - -class ReadRequest(proto.Message): - r"""The request for [Read][google.spanner.v1.Spanner.Read] and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - - Attributes: - session (str): - Required. The session in which the read - should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - The transaction to use. If none is provided, - the default is a temporary read-only transaction - with strong concurrency. - table (str): - Required. The name of the table in the - database to be read. - index (str): - If non-empty, the name of an index on - [table][google.spanner.v1.ReadRequest.table]. This index is - used instead of the table primary key when interpreting - [key_set][google.spanner.v1.ReadRequest.key_set] and sorting - result rows. See - [key_set][google.spanner.v1.ReadRequest.key_set] for further - information. - columns (MutableSequence[str]): - Required. The columns of - [table][google.spanner.v1.ReadRequest.table] to be returned - for each row matching this request. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. 
``key_set`` identifies the rows to be yielded.
- ``key_set`` names the primary keys of the rows in
- [table][google.spanner.v1.ReadRequest.table] to be yielded,
- unless [index][google.spanner.v1.ReadRequest.index] is
- present. If [index][google.spanner.v1.ReadRequest.index] is
- present, then
- [key_set][google.spanner.v1.ReadRequest.key_set] instead
- names index keys in
- [index][google.spanner.v1.ReadRequest.index].
-
- If the
- [partition_token][google.spanner.v1.ReadRequest.partition_token]
- field is empty, rows are yielded in table primary key order
- (if [index][google.spanner.v1.ReadRequest.index] is empty)
- or index key order (if
- [index][google.spanner.v1.ReadRequest.index] is non-empty).
- If the
- [partition_token][google.spanner.v1.ReadRequest.partition_token]
- field isn't empty, rows are yielded in an unspecified order.
-
- It isn't an error for the ``key_set`` to name rows that
- don't exist in the database. Read yields nothing for
- nonexistent rows.
- limit (int):
- If greater than zero, only the first ``limit`` rows are
- yielded. If ``limit`` is zero, the default is no limit. A
- limit can't be specified if ``partition_token`` is set.
- resume_token (bytes):
- If this request is resuming a previously interrupted read,
- ``resume_token`` should be copied from the last
- [PartialResultSet][google.spanner.v1.PartialResultSet]
- yielded before the interruption. Doing this enables the new
- read to resume where the last read left off. The rest of the
- request parameters must exactly match the request that
- yielded this token.
- partition_token (bytes):
- If present, results are restricted to the specified
- partition previously created using ``PartitionRead``. There
- must be an exact match for the values of fields common to
- this message and the PartitionReadRequest message used to
- create this partition_token.
- request_options (google.cloud.spanner_v1.types.RequestOptions):
- Common options for this request.
- directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions):
- Directed read options for this request.
- data_boost_enabled (bool):
- If this is for a partitioned read and this field is set to
- ``true``, the request is executed with Spanner Data Boost
- independent compute resources.
-
- If the field is set to ``true`` but the request doesn't set
- ``partition_token``, the API returns an ``INVALID_ARGUMENT``
- error.
- order_by (google.cloud.spanner_v1.types.ReadRequest.OrderBy):
- Optional. Order for the returned rows.
-
- By default, Spanner returns result rows in primary key order
- except for PartitionRead requests. For applications that
- don't require rows to be returned in primary key
- (``ORDER_BY_PRIMARY_KEY``) order, setting the
- ``ORDER_BY_NO_ORDER`` option allows Spanner to optimize row
- retrieval, resulting in lower latencies in certain cases
- (for example, bulk point lookups).
- lock_hint (google.cloud.spanner_v1.types.ReadRequest.LockHint):
- Optional. Lock hint for the request; it can
- only be used with read-write transactions.
- """
- class OrderBy(proto.Enum):
- r"""An option to control the order in which rows are returned
- from a read.
-
- Values:
- ORDER_BY_UNSPECIFIED (0):
- Default value.
-
- ``ORDER_BY_UNSPECIFIED`` is equivalent to
- ``ORDER_BY_PRIMARY_KEY``.
- ORDER_BY_PRIMARY_KEY (1):
- Read rows are returned in primary key order.
-
- In the event that this option is used in conjunction with
- the ``partition_token`` field, the API returns an
- ``INVALID_ARGUMENT`` error.
- ORDER_BY_NO_ORDER (2): - Read rows are returned in any order. - """ - ORDER_BY_UNSPECIFIED = 0 - ORDER_BY_PRIMARY_KEY = 1 - ORDER_BY_NO_ORDER = 2 - - class LockHint(proto.Enum): - r"""A lock hint mechanism for reads done within a transaction. - - Values: - LOCK_HINT_UNSPECIFIED (0): - Default value. - - ``LOCK_HINT_UNSPECIFIED`` is equivalent to - ``LOCK_HINT_SHARED``. - LOCK_HINT_SHARED (1): - Acquire shared locks. - - By default when you perform a read as part of a read-write - transaction, Spanner acquires shared read locks, which - allows other reads to still access the data until your - transaction is ready to commit. When your transaction is - committing and writes are being applied, the transaction - attempts to upgrade to an exclusive lock for any data you - are writing. For more information about locks, see `Lock - modes `__. - LOCK_HINT_EXCLUSIVE (2): - Acquire exclusive locks. - - Requesting exclusive locks is beneficial if you observe high - write contention, which means you notice that multiple - transactions are concurrently trying to read and write to - the same data, resulting in a large number of aborts. This - problem occurs when two transactions initially acquire - shared locks and then both try to upgrade to exclusive locks - at the same time. In this situation both transactions are - waiting for the other to give up their lock, resulting in a - deadlocked situation. Spanner is able to detect this - occurring and force one of the transactions to abort. - However, this is a slow and expensive operation and results - in lower performance. In this case it makes sense to acquire - exclusive locks at the start of the transaction because then - when multiple transactions try to act on the same data, they - automatically get serialized. Each transaction waits its - turn to acquire the lock and avoids getting into deadlock - situations. - - Because the exclusive lock hint is just a hint, it shouldn't - be considered equivalent to a mutex. In other words, you - shouldn't use Spanner exclusive locks as a mutual exclusion - mechanism for the execution of code outside of Spanner. - - **Note:** Request exclusive locks judiciously because they - block others from reading that data for the entire - transaction, rather than just when the writes are being - performed. Unless you observe high write contention, you - should use the default of shared read locks so you don't - prematurely block other clients from reading the data that - you're writing to. 
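As an illustrative sketch (the session path, table, and columns are placeholders, not part of the generated file), a bulk point-lookup read that opts out of primary-key ordering, as discussed for ``ORDER_BY_NO_ORDER``:

.. code:: python

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()
    request = spanner_v1.ReadRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
        table="Users",  # hypothetical table
        columns=["UserId", "Name"],
        key_set=spanner_v1.KeySet(all_=True),  # read every row
        # Unordered results let Spanner optimize row retrieval.
        order_by=spanner_v1.ReadRequest.OrderBy.ORDER_BY_NO_ORDER,
    )
    result_set = client.read(request=request)

``lock_hint`` would be set the same way (for example, ``ReadRequest.LockHint.LOCK_HINT_EXCLUSIVE``), but only inside a read-write transaction.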
- """ - LOCK_HINT_UNSPECIFIED = 0 - LOCK_HINT_SHARED = 1 - LOCK_HINT_EXCLUSIVE = 2 - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - table: str = proto.Field( - proto.STRING, - number=3, - ) - index: str = proto.Field( - proto.STRING, - number=4, - ) - columns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - key_set: keys.KeySet = proto.Field( - proto.MESSAGE, - number=6, - message=keys.KeySet, - ) - limit: int = proto.Field( - proto.INT64, - number=8, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=9, - ) - partition_token: bytes = proto.Field( - proto.BYTES, - number=10, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=11, - message='RequestOptions', - ) - directed_read_options: 'DirectedReadOptions' = proto.Field( - proto.MESSAGE, - number=14, - message='DirectedReadOptions', - ) - data_boost_enabled: bool = proto.Field( - proto.BOOL, - number=15, - ) - order_by: OrderBy = proto.Field( - proto.ENUM, - number=16, - enum=OrderBy, - ) - lock_hint: LockHint = proto.Field( - proto.ENUM, - number=17, - enum=LockHint, - ) - - -class BeginTransactionRequest(proto.Message): - r"""The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - - Attributes: - session (str): - Required. The session in which the - transaction runs. - options (google.cloud.spanner_v1.types.TransactionOptions): - Required. Options for the new transaction. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. Priority is ignored for - this request. Setting the priority in this - ``request_options`` struct doesn't do anything. To set the - priority for a transaction, set it on the reads and writes - that are part of this transaction instead. - mutation_key (google.cloud.spanner_v1.types.Mutation): - Optional. Required for read-write - transactions on a multiplexed session that - commit mutations but don't perform any reads or - queries. You must randomly select one of the - mutations from the mutation set and send it as a - part of this request. - """ - - session: str = proto.Field( - proto.STRING, - number=1, - ) - options: gs_transaction.TransactionOptions = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionOptions, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='RequestOptions', - ) - mutation_key: mutation.Mutation = proto.Field( - proto.MESSAGE, - number=4, - message=mutation.Mutation, - ) - - -class CommitRequest(proto.Message): - r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - session (str): - Required. The session in which the - transaction to be committed is running. - transaction_id (bytes): - Commit a previously-started transaction. - - This field is a member of `oneof`_ ``transaction``. - single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): - Execute mutations in a temporary transaction. 
Note that
- unlike commit of a previously-started transaction, commit
- with a temporary transaction is non-idempotent. That is, if
- the ``CommitRequest`` is sent to Cloud Spanner more than
- once (for instance, due to retries in the application, or in
- the transport library), it's possible that the mutations are
- executed more than once. If this is undesirable, use
- [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]
- and [Commit][google.spanner.v1.Spanner.Commit] instead.
-
- This field is a member of `oneof`_ ``transaction``.
- mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]):
- The mutations to be executed when this
- transaction commits. All mutations are applied
- atomically, in the order they appear in this
- list.
- return_commit_stats (bool):
- If ``true``, then statistics related to the transaction are
- included in the
- [CommitResponse][google.spanner.v1.CommitResponse.commit_stats].
- Default value is ``false``.
- max_commit_delay (google.protobuf.duration_pb2.Duration):
- Optional. The amount of latency this request
- is configured to incur in order to improve
- throughput. If this field isn't set, Spanner
- assumes requests are relatively latency
- sensitive and automatically determines an
- appropriate delay time. You can specify a commit
- delay value between 0 and 500 ms.
- request_options (google.cloud.spanner_v1.types.RequestOptions):
- Common options for this request.
- precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken):
- Optional. If the read-write transaction was executed on a
- multiplexed session, then you must include the precommit
- token with the highest sequence number received in this
- transaction attempt. Failing to do so results in a
- ``FailedPrecondition`` error.
- """
-
- session: str = proto.Field(
- proto.STRING,
- number=1,
- )
- transaction_id: bytes = proto.Field(
- proto.BYTES,
- number=2,
- oneof='transaction',
- )
- single_use_transaction: gs_transaction.TransactionOptions = proto.Field(
- proto.MESSAGE,
- number=3,
- oneof='transaction',
- message=gs_transaction.TransactionOptions,
- )
- mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField(
- proto.MESSAGE,
- number=4,
- message=mutation.Mutation,
- )
- return_commit_stats: bool = proto.Field(
- proto.BOOL,
- number=5,
- )
- max_commit_delay: duration_pb2.Duration = proto.Field(
- proto.MESSAGE,
- number=8,
- message=duration_pb2.Duration,
- )
- request_options: 'RequestOptions' = proto.Field(
- proto.MESSAGE,
- number=6,
- message='RequestOptions',
- )
- precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field(
- proto.MESSAGE,
- number=9,
- message=gs_transaction.MultiplexedSessionPrecommitToken,
- )
-
-
-class RollbackRequest(proto.Message):
- r"""The request for [Rollback][google.spanner.v1.Spanner.Rollback].
-
- Attributes:
- session (str):
- Required. The session in which the
- transaction to roll back is running.
- transaction_id (bytes):
- Required. The transaction to roll back.
- """
-
- session: str = proto.Field(
- proto.STRING,
- number=1,
- )
- transaction_id: bytes = proto.Field(
- proto.BYTES,
- number=2,
- )
-
-
-class BatchWriteRequest(proto.Message):
- r"""The request for [BatchWrite][google.spanner.v1.Spanner.BatchWrite].
-
- Attributes:
- session (str):
- Required. The session in which the batch
- request is to be run.
- request_options (google.cloud.spanner_v1.types.RequestOptions):
- Common options for this request.
- mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): - Required. The groups of mutations to be - applied. - exclude_txn_from_change_streams (bool): - Optional. If you don't set the - ``exclude_txn_from_change_streams`` option or if it's set to - ``false``, then any change streams monitoring columns - modified by transactions will capture the updates made - within that transaction. - """ - - class MutationGroup(proto.Message): - r"""A group of mutations to be committed together. Related - mutations should be placed in a group. For example, two - mutations inserting rows with the same primary key prefix in - both parent and child tables are related. - - Attributes: - mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): - Required. The mutations in this group. - """ - - mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=mutation.Mutation, - ) - - session: str = proto.Field( - proto.STRING, - number=1, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='RequestOptions', - ) - mutation_groups: MutableSequence[MutationGroup] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=MutationGroup, - ) - exclude_txn_from_change_streams: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class BatchWriteResponse(proto.Message): - r"""The result of applying a batch of mutations. - - Attributes: - indexes (MutableSequence[int]): - The mutation groups applied in this batch. The values index - into the ``mutation_groups`` field in the corresponding - ``BatchWriteRequest``. - status (google.rpc.status_pb2.Status): - An ``OK`` status indicates success. Any other status - indicates a failure. - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - The commit timestamp of the transaction that applied this - batch. Present if ``status`` is ``OK``, absent otherwise. - """ - - indexes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=1, - ) - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py deleted file mode 100644 index 5e13285565..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'TransactionOptions', - 'Transaction', - 'TransactionSelector', - 'MultiplexedSessionPrecommitToken', - }, -) - - -class TransactionOptions(proto.Message): - r"""Options to use for transactions. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_write (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite): - Transaction may write. - - Authorization to begin a read-write transaction requires - ``spanner.databases.beginOrRollbackReadWriteTransaction`` - permission on the ``session`` resource. - - This field is a member of `oneof`_ ``mode``. - partitioned_dml (google.cloud.spanner_v1.types.TransactionOptions.PartitionedDml): - Partitioned DML transaction. - - Authorization to begin a Partitioned DML transaction - requires - ``spanner.databases.beginPartitionedDmlTransaction`` - permission on the ``session`` resource. - - This field is a member of `oneof`_ ``mode``. - read_only (google.cloud.spanner_v1.types.TransactionOptions.ReadOnly): - Transaction does not write. - - Authorization to begin a read-only transaction requires - ``spanner.databases.beginReadOnlyTransaction`` permission on - the ``session`` resource. - - This field is a member of `oneof`_ ``mode``. - exclude_txn_from_change_streams (bool): - When ``exclude_txn_from_change_streams`` is set to ``true``, - it prevents read or write transactions from being tracked in - change streams. - - - If the DDL option ``allow_txn_exclusion`` is set to - ``true``, then the updates made within this transaction - aren't recorded in the change stream. - - - If you don't set the DDL option ``allow_txn_exclusion`` or - if it's set to ``false``, then the updates made within - this transaction are recorded in the change stream. - - When ``exclude_txn_from_change_streams`` is set to ``false`` - or not set, modifications from this transaction are recorded - in all change streams that are tracking columns modified by - these transactions. - - The ``exclude_txn_from_change_streams`` option can only be - specified for read-write or partitioned DML transactions, - otherwise the API returns an ``INVALID_ARGUMENT`` error. - isolation_level (google.cloud.spanner_v1.types.TransactionOptions.IsolationLevel): - Isolation level for the transaction. - """ - class IsolationLevel(proto.Enum): - r"""``IsolationLevel`` is used when setting ``isolation_level`` for a - transaction. - - Values: - ISOLATION_LEVEL_UNSPECIFIED (0): - Default value. - - If the value is not specified, the ``SERIALIZABLE`` - isolation level is used. - SERIALIZABLE (1): - All transactions appear as if they executed in a serial - order, even if some of the reads, writes, and other - operations of distinct transactions actually occurred in - parallel. Spanner assigns commit timestamps that reflect the - order of committed transactions to implement this property. - Spanner offers a stronger guarantee than serializability - called external consistency. 
For more information, see
- `TrueTime and external
- consistency `__.
- REPEATABLE_READ (2):
- All reads performed during the transaction observe a
- consistent snapshot of the database, and the transaction is
- only successfully committed in the absence of conflicts
- between its updates and any concurrent updates that have
- occurred since that snapshot. Consequently, in contrast to
- ``SERIALIZABLE`` transactions, only write-write conflicts
- are detected in snapshot transactions.
-
- This isolation level does not support Read-only and
- Partitioned DML transactions.
-
- When ``REPEATABLE_READ`` is specified on a read-write
- transaction, the locking semantics default to
- ``OPTIMISTIC``.
- """
- ISOLATION_LEVEL_UNSPECIFIED = 0
- SERIALIZABLE = 1
- REPEATABLE_READ = 2
-
- class ReadWrite(proto.Message):
- r"""Message type to initiate a read-write transaction. Currently
- this transaction type has no options.
-
- Attributes:
- read_lock_mode (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite.ReadLockMode):
- Read lock mode for the transaction.
- multiplexed_session_previous_transaction_id (bytes):
- Optional. Clients should pass the transaction
- ID of the previous transaction attempt that was
- aborted if this transaction is being executed on
- a multiplexed session.
- """
- class ReadLockMode(proto.Enum):
- r"""``ReadLockMode`` is used to set the read lock mode for read-write
- transactions.
-
- Values:
- READ_LOCK_MODE_UNSPECIFIED (0):
- Default value.
-
- - If isolation level is
- [REPEATABLE_READ][google.spanner.v1.TransactionOptions.IsolationLevel.REPEATABLE_READ],
- then it is an error to specify ``read_lock_mode``. Locking
- semantics default to ``OPTIMISTIC``. No validation checks
- are done for reads, except to validate that the data that
- was served at the snapshot time is unchanged at commit
- time in the following cases:
-
- 1. reads done as part of queries that use
- ``SELECT FOR UPDATE``
- 2. reads done as part of statements with a
- ``LOCK_SCANNED_RANGES`` hint
- 3. reads done as part of DML statements
-
- - At all other isolation levels, if ``read_lock_mode`` is
- the default value, then pessimistic read locks are used.
- PESSIMISTIC (1):
- Pessimistic lock mode.
-
- Read locks are acquired immediately on read. The semantics
- described apply only to
- [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE]
- isolation.
- OPTIMISTIC (2):
- Optimistic lock mode.
-
- Locks for reads within the transaction are not acquired on
- read. Instead, the locks are acquired on a commit to validate
- that read/queried data has not changed since the transaction
- started. The semantics described apply only to
- [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE]
- isolation.
- """
- READ_LOCK_MODE_UNSPECIFIED = 0
- PESSIMISTIC = 1
- OPTIMISTIC = 2
-
- read_lock_mode: 'TransactionOptions.ReadWrite.ReadLockMode' = proto.Field(
- proto.ENUM,
- number=1,
- enum='TransactionOptions.ReadWrite.ReadLockMode',
- )
- multiplexed_session_previous_transaction_id: bytes = proto.Field(
- proto.BYTES,
- number=2,
- )
-
- class PartitionedDml(proto.Message):
- r"""Message type to initiate a Partitioned DML transaction.
- """
-
- class ReadOnly(proto.Message):
- r"""Message type to initiate a read-only transaction.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- ..
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - strong (bool): - Read at a timestamp where all previously - committed transactions are visible. - - This field is a member of `oneof`_ ``timestamp_bound``. - min_read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Executes all reads at a timestamp >= ``min_read_timestamp``. - - This is useful for requesting fresher data than some - previous read, or data that is fresh enough to observe the - effects of some previously committed transaction whose - timestamp is known. - - Note that this option can only be used in single-use - transactions. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - - This field is a member of `oneof`_ ``timestamp_bound``. - max_staleness (google.protobuf.duration_pb2.Duration): - Read data at a timestamp >= ``NOW - max_staleness`` seconds. - Guarantees that all writes that have committed more than the - specified number of seconds ago are visible. Because Cloud - Spanner chooses the exact timestamp, this mode works even if - the client's local clock is substantially skewed from Cloud - Spanner commit timestamps. - - Useful for reading the freshest data available at a nearby - replica, while bounding the possible staleness if the local - replica has fallen behind. - - Note that this option can only be used in single-use - transactions. - - This field is a member of `oneof`_ ``timestamp_bound``. - read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Executes all reads at the given timestamp. Unlike other - modes, reads at a specific timestamp are repeatable; the - same read at the same timestamp always returns the same - data. If the timestamp is in the future, the read is blocked - until the specified timestamp, modulo the read's deadline. - - Useful for large scale consistent reads such as mapreduces, - or for coordinating many reads against a consistent snapshot - of the data. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - - This field is a member of `oneof`_ ``timestamp_bound``. - exact_staleness (google.protobuf.duration_pb2.Duration): - Executes all reads at a timestamp that is - ``exact_staleness`` old. The timestamp is chosen soon after - the read is started. - - Guarantees that all writes that have committed more than the - specified number of seconds ago are visible. Because Cloud - Spanner chooses the exact timestamp, this mode works even if - the client's local clock is substantially skewed from Cloud - Spanner commit timestamps. - - Useful for reading at nearby replicas without the - distributed timestamp negotiation overhead of - ``max_staleness``. - - This field is a member of `oneof`_ ``timestamp_bound``. - return_read_timestamp (bool): - If true, the Cloud Spanner-selected read timestamp is - included in the [Transaction][google.spanner.v1.Transaction] - message that describes the transaction. 
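To make the bounds concrete, a small illustrative sketch (not part of the generated file) of building a single-use, bounded-staleness selector from these fields:

.. code:: python

    from google.cloud import spanner_v1
    from google.protobuf import duration_pb2

    # Read a snapshot that is at most 15 seconds stale, and ask Cloud
    # Spanner to report the timestamp it actually chose.
    read_only = spanner_v1.TransactionOptions.ReadOnly(
        exact_staleness=duration_pb2.Duration(seconds=15),
        return_read_timestamp=True,
    )
    options = spanner_v1.TransactionOptions(read_only=read_only)
    selector = spanner_v1.TransactionSelector(single_use=options)

Because the timestamp-bound fields form a oneof, setting ``exact_staleness`` here clears any previously set bound such as ``strong``.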
- """ - - strong: bool = proto.Field( - proto.BOOL, - number=1, - oneof='timestamp_bound', - ) - min_read_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof='timestamp_bound', - message=timestamp_pb2.Timestamp, - ) - max_staleness: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - oneof='timestamp_bound', - message=duration_pb2.Duration, - ) - read_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - oneof='timestamp_bound', - message=timestamp_pb2.Timestamp, - ) - exact_staleness: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=5, - oneof='timestamp_bound', - message=duration_pb2.Duration, - ) - return_read_timestamp: bool = proto.Field( - proto.BOOL, - number=6, - ) - - read_write: ReadWrite = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ReadWrite, - ) - partitioned_dml: PartitionedDml = proto.Field( - proto.MESSAGE, - number=3, - oneof='mode', - message=PartitionedDml, - ) - read_only: ReadOnly = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=ReadOnly, - ) - exclude_txn_from_change_streams: bool = proto.Field( - proto.BOOL, - number=5, - ) - isolation_level: IsolationLevel = proto.Field( - proto.ENUM, - number=6, - enum=IsolationLevel, - ) - - -class Transaction(proto.Message): - r"""A transaction. - - Attributes: - id (bytes): - ``id`` may be used to identify the transaction in subsequent - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], - [Commit][google.spanner.v1.Spanner.Commit], or - [Rollback][google.spanner.v1.Spanner.Rollback] calls. - - Single-use read-only transactions do not have IDs, because - single-use transactions do not support multiple requests. - read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - For snapshot read-only transactions, the read timestamp - chosen for the transaction. Not returned by default: see - [TransactionOptions.ReadOnly.return_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.return_read_timestamp]. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - A precommit token is included in the response of a - BeginTransaction request if the read-write transaction is on - a multiplexed session and a mutation_key was specified in - the - [BeginTransaction][google.spanner.v1.BeginTransactionRequest]. - The precommit token with the highest sequence number from - this transaction attempt should be passed to the - [Commit][google.spanner.v1.Spanner.Commit] request for this - transaction. - """ - - id: bytes = proto.Field( - proto.BYTES, - number=1, - ) - read_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - precommit_token: 'MultiplexedSessionPrecommitToken' = proto.Field( - proto.MESSAGE, - number=3, - message='MultiplexedSessionPrecommitToken', - ) - - -class TransactionSelector(proto.Message): - r"""This message is used to select the transaction in which a - [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] call runs. - - See [TransactionOptions][google.spanner.v1.TransactionOptions] for - more information about transactions. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - single_use (google.cloud.spanner_v1.types.TransactionOptions): - Execute the read or SQL query in a temporary - transaction. This is the most efficient way to - execute a transaction that consists of a single - SQL query. - - This field is a member of `oneof`_ ``selector``. - id (bytes): - Execute the read or SQL query in a - previously-started transaction. - - This field is a member of `oneof`_ ``selector``. - begin (google.cloud.spanner_v1.types.TransactionOptions): - Begin a new transaction and execute this read or SQL query - in it. The transaction ID of the new transaction is returned - in - [ResultSetMetadata.transaction][google.spanner.v1.ResultSetMetadata.transaction], - which is a [Transaction][google.spanner.v1.Transaction]. - - This field is a member of `oneof`_ ``selector``. - """ - - single_use: 'TransactionOptions' = proto.Field( - proto.MESSAGE, - number=1, - oneof='selector', - message='TransactionOptions', - ) - id: bytes = proto.Field( - proto.BYTES, - number=2, - oneof='selector', - ) - begin: 'TransactionOptions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='selector', - message='TransactionOptions', - ) - - -class MultiplexedSessionPrecommitToken(proto.Message): - r"""When a read-write transaction is executed on a multiplexed session, - this precommit token is sent back to the client as a part of the - [Transaction][google.spanner.v1.Transaction] message in the - [BeginTransaction][google.spanner.v1.BeginTransactionRequest] - response and also as a part of the - [ResultSet][google.spanner.v1.ResultSet] and - [PartialResultSet][google.spanner.v1.PartialResultSet] responses. - - Attributes: - precommit_token (bytes): - Opaque precommit token. - seq_num (int): - An incrementing seq number is generated on - every precommit token that is returned. Clients - should remember the precommit token with the - highest sequence number from the current - transaction attempt. - """ - - precommit_token: bytes = proto.Field( - proto.BYTES, - number=1, - ) - seq_num: int = proto.Field( - proto.INT32, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py deleted file mode 100644 index 5b6365599e..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.spanner.v1',
- manifest={
- 'TypeCode',
- 'TypeAnnotationCode',
- 'Type',
- 'StructType',
- },
-)
-
-
-class TypeCode(proto.Enum):
- r"""``TypeCode`` is used as part of [Type][google.spanner.v1.Type] to
- indicate the type of a Cloud Spanner value.
-
- Each legal value of a type can be encoded to or decoded from a JSON
- value, using the encodings described below. All Cloud Spanner values
- can be ``null``, regardless of type; ``null``\ s are always encoded
- as a JSON ``null``.
-
- Values:
- TYPE_CODE_UNSPECIFIED (0):
- Not specified.
- BOOL (1):
- Encoded as JSON ``true`` or ``false``.
- INT64 (2):
- Encoded as ``string``, in decimal format.
- FLOAT64 (3):
- Encoded as ``number``, or the strings ``"NaN"``,
- ``"Infinity"``, or ``"-Infinity"``.
- FLOAT32 (15):
- Encoded as ``number``, or the strings ``"NaN"``,
- ``"Infinity"``, or ``"-Infinity"``.
- TIMESTAMP (4):
- Encoded as ``string`` in RFC 3339 timestamp format. The time
- zone must be present, and must be ``"Z"``.
-
- If the schema has the column option
- ``allow_commit_timestamp=true``, the placeholder string
- ``"spanner.commit_timestamp()"`` can be used to instruct the
- system to insert the commit timestamp associated with the
- transaction commit.
- DATE (5):
- Encoded as ``string`` in RFC 3339 date format.
- STRING (6):
- Encoded as ``string``.
- BYTES (7):
- Encoded as a base64-encoded ``string``, as described in RFC
- 4648, section 4.
- ARRAY (8):
- Encoded as ``list``, where the list elements are represented
- according to
- [array_element_type][google.spanner.v1.Type.array_element_type].
- STRUCT (9):
- Encoded as ``list``, where list element ``i`` is represented
- according to
- [struct_type.fields[i]][google.spanner.v1.StructType.fields].
- NUMERIC (10):
- Encoded as ``string``, in decimal format or scientific
- notation format. Decimal format: ``[+-]Digits[.[Digits]]``
- or ``[+-][Digits].Digits``
-
- Scientific notation:
- ``[+-]Digits[.[Digits]][ExponentIndicator[+-]Digits]`` or
- ``[+-][Digits].Digits[ExponentIndicator[+-]Digits]``
- (ExponentIndicator is ``"e"`` or ``"E"``)
- JSON (11):
- Encoded as a JSON-formatted ``string`` as described in RFC
- 7159. The following rules are applied when parsing JSON
- input:
-
- - Whitespace characters are not preserved.
- - If a JSON object has duplicate keys, only the first key is
- preserved.
- - Members of a JSON object are not guaranteed to have their
- order preserved.
- - JSON array elements will have their order preserved.
- PROTO (13):
- Encoded as a base64-encoded ``string``, as described in RFC
- 4648, section 4.
- ENUM (14):
- Encoded as ``string``, in decimal format.
- INTERVAL (16):
- Encoded as ``string``, in ``ISO8601`` duration format -
- ``P[n]Y[n]M[n]DT[n]H[n]M[n[.fraction]]S`` where ``n`` is an
- integer. For example, ``P1Y2M3DT4H5M6.5S`` represents a time
- duration of 1 year, 2 months, 3 days, 4 hours, 5 minutes,
- and 6.5 seconds.
- UUID (17):
- Encoded as ``string``, in lower-case hexadecimal format, as
- described in RFC 9562, section 4.
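For example, a sketch (placeholder session and hypothetical table, illustration only) of disambiguating a parameter that would otherwise arrive as a JSON string:

.. code:: python

    from google.cloud import spanner_v1
    from google.protobuf import struct_pb2

    request = spanner_v1.ExecuteSqlRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
        sql="SELECT * FROM Payloads WHERE Data = @data",  # hypothetical table
        # BYTES and STRING parameters both travel as JSON strings, so the
        # intended type must be stated explicitly; the value is base64.
        params=struct_pb2.Struct(
            fields={"data": struct_pb2.Value(string_value="aGVsbG8=")}
        ),
        param_types={"data": spanner_v1.Type(code=spanner_v1.TypeCode.BYTES)},
    )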
- """ - TYPE_CODE_UNSPECIFIED = 0 - BOOL = 1 - INT64 = 2 - FLOAT64 = 3 - FLOAT32 = 15 - TIMESTAMP = 4 - DATE = 5 - STRING = 6 - BYTES = 7 - ARRAY = 8 - STRUCT = 9 - NUMERIC = 10 - JSON = 11 - PROTO = 13 - ENUM = 14 - INTERVAL = 16 - UUID = 17 - - -class TypeAnnotationCode(proto.Enum): - r"""``TypeAnnotationCode`` is used as a part of - [Type][google.spanner.v1.Type] to disambiguate SQL types that should - be used for a given Cloud Spanner value. Disambiguation is needed - because the same Cloud Spanner type can be mapped to different SQL - types depending on SQL dialect. TypeAnnotationCode doesn't affect - the way value is serialized. - - Values: - TYPE_ANNOTATION_CODE_UNSPECIFIED (0): - Not specified. - PG_NUMERIC (2): - PostgreSQL compatible NUMERIC type. This annotation needs to - be applied to [Type][google.spanner.v1.Type] instances - having [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] type - code to specify that values of this type should be treated - as PostgreSQL NUMERIC values. Currently this annotation is - always needed for - [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] when a client - interacts with PostgreSQL-enabled Spanner databases. - PG_JSONB (3): - PostgreSQL compatible JSONB type. This annotation needs to - be applied to [Type][google.spanner.v1.Type] instances - having [JSON][google.spanner.v1.TypeCode.JSON] type code to - specify that values of this type should be treated as - PostgreSQL JSONB values. Currently this annotation is always - needed for [JSON][google.spanner.v1.TypeCode.JSON] when a - client interacts with PostgreSQL-enabled Spanner databases. - PG_OID (4): - PostgreSQL compatible OID type. This - annotation can be used by a client interacting - with PostgreSQL-enabled Spanner database to - specify that a value should be treated using the - semantics of the OID type. - """ - TYPE_ANNOTATION_CODE_UNSPECIFIED = 0 - PG_NUMERIC = 2 - PG_JSONB = 3 - PG_OID = 4 - - -class Type(proto.Message): - r"""``Type`` indicates the type of a Cloud Spanner value, as might be - stored in a table cell or returned from an SQL query. - - Attributes: - code (google.cloud.spanner_v1.types.TypeCode): - Required. The [TypeCode][google.spanner.v1.TypeCode] for - this type. - array_element_type (google.cloud.spanner_v1.types.Type): - If [code][google.spanner.v1.Type.code] == - [ARRAY][google.spanner.v1.TypeCode.ARRAY], then - ``array_element_type`` is the type of the array elements. - struct_type (google.cloud.spanner_v1.types.StructType): - If [code][google.spanner.v1.Type.code] == - [STRUCT][google.spanner.v1.TypeCode.STRUCT], then - ``struct_type`` provides type information for the struct's - fields. - type_annotation (google.cloud.spanner_v1.types.TypeAnnotationCode): - The - [TypeAnnotationCode][google.spanner.v1.TypeAnnotationCode] - that disambiguates SQL type that Spanner will use to - represent values of this type during query processing. This - is necessary for some type codes because a single - [TypeCode][google.spanner.v1.TypeCode] can be mapped to - different SQL types depending on the SQL dialect. - [type_annotation][google.spanner.v1.Type.type_annotation] - typically is not needed to process the content of a value - (it doesn't affect serialization) and clients can ignore it - on the read path. 
- proto_type_fqn (str): - If [code][google.spanner.v1.Type.code] == - [PROTO][google.spanner.v1.TypeCode.PROTO] or - [code][google.spanner.v1.Type.code] == - [ENUM][google.spanner.v1.TypeCode.ENUM], then - ``proto_type_fqn`` is the fully qualified name of the proto - type representing the proto/enum definition. - """ - - code: 'TypeCode' = proto.Field( - proto.ENUM, - number=1, - enum='TypeCode', - ) - array_element_type: 'Type' = proto.Field( - proto.MESSAGE, - number=2, - message='Type', - ) - struct_type: 'StructType' = proto.Field( - proto.MESSAGE, - number=3, - message='StructType', - ) - type_annotation: 'TypeAnnotationCode' = proto.Field( - proto.ENUM, - number=4, - enum='TypeAnnotationCode', - ) - proto_type_fqn: str = proto.Field( - proto.STRING, - number=5, - ) - - -class StructType(proto.Message): - r"""``StructType`` defines the fields of a - [STRUCT][google.spanner.v1.TypeCode.STRUCT] type. - - Attributes: - fields (MutableSequence[google.cloud.spanner_v1.types.StructType.Field]): - The list of fields that make up this struct. Order is - significant, because values of this struct type are - represented as lists, where the order of field values - matches the order of fields in the - [StructType][google.spanner.v1.StructType]. In turn, the - order of fields matches the order of columns in a read - request, or the order of fields in the ``SELECT`` clause of - a query. - """ - - class Field(proto.Message): - r"""Message representing a single field of a struct. - - Attributes: - name (str): - The name of the field. For reads, this is the column name. - For SQL queries, it is the column alias (e.g., ``"Word"`` in - the query ``"SELECT 'hello' AS Word"``), or the column name - (e.g., ``"ColName"`` in the query - ``"SELECT ColName FROM Table"``). Some columns might have an - empty name (e.g., ``"SELECT UPPER(ColName)"``). Note that a - query result can contain multiple fields with the same name. - type_ (google.cloud.spanner_v1.types.Type): - The type of the field. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'Type' = proto.Field( - proto.MESSAGE, - number=2, - message='Type', - ) - - fields: MutableSequence[Field] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Field, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/mypy.ini b/owl-bot-staging/spanner/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/spanner/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/spanner/v1/noxfile.py b/owl-bot-staging/spanner/v1/noxfile.py deleted file mode 100644 index 594241417c..0000000000 --- a/owl-bot-staging/spanner/v1/noxfile.py +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", - "3.14", -] - -DEFAULT_PYTHON_VERSION = "3.14" - -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): -# Switch this to Python 3.15 alpha1 -# https://peps.python.org/pep-0790/ -PREVIEW_PYTHON_VERSION = "3.14" - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-spanner" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy - "mypy<1.16.0", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. 
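(A hedged aside on the `protobuf_implementation` parameter: the sketch below uses google.protobuf.internal.api_implementation, an internal but long-stable protobuf module, so treat the exact call as an assumption rather than a documented API.)

    # Sketch: confirm which protobuf backend a test process actually got.
    # PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION is consulted when google.protobuf
    # is first imported, which is why the session below passes it via `env=`
    # rather than exporting it after the run has started.
    from google.protobuf.internal import api_implementation

    print(api_implementation.Type())  # one of 'python', 'upb', or 'cpp'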
- session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory - # paths to build: - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=PREVIEW_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # Note: If a dependency is added to the `prerel_deps` list, - # the `core_dependencies_from_source` list in the `core_deps_from_source` - # nox session should also be updated. - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpc-google-iam-v1", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--ignore-installed", dep) - # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` - # to the dictionary below once this bug is fixed. - # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add - # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below - # once this bug is fixed. - package_namespaces = { - "google-api-core": "google.api_core", - "google-auth": "google.auth", - "grpcio": "grpc", - "protobuf": "google.protobuf", - "proto-plus": "proto", - } - - version_namespace = package_namespaces.get(dep) - - print(f"Installed {dep}") - if version_namespace: - session.run( - "python", - "-c", - f"import {version_namespace}; print({version_namespace}.__version__)", - ) - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb"], -) -def core_deps_from_source(session, protobuf_implementation): - """Run all tests with core dependencies installed from source - rather than pulling the dependencies from PyPI. - """ - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and - # `grpcio-status` should be added to the list below so that they are installed from source, - # rather than PyPI. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be - # added to the list below so that it is installed from source, rather than PyPI - # Note: If a dependency is added to the `core_dependencies_from_source` list, - # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
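    # (Shape of the entries below: each is a PEP 508 "direct reference",
    # "<name> @ git+<repo>#egg=<name>&subdirectory=<path>", which tells pip
    # to build <name> from that subdirectory of the repository's default
    # branch rather than fetching a released wheel from PyPI.)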
- core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json deleted file mode 100644 index f7f33c3d29..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ /dev/null @@ -1,2579 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.spanner.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-spanner", - "version": "0.0.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_create_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BatchCreateSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "session_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", - "shortName": "batch_create_sessions" - }, - "description": "Sample for BatchCreateSessions", - "file": "spanner_v1_generated_spanner_batch_create_sessions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_create_sessions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.batch_create_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": 
"Spanner" - }, - "shortName": "BatchCreateSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "session_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", - "shortName": "batch_create_sessions" - }, - "description": "Sample for BatchCreateSessions", - "file": "spanner_v1_generated_spanner_batch_create_sessions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_create_sessions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_write", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchWrite", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BatchWrite" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BatchWriteRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "mutation_groups", - "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", - "shortName": "batch_write" - }, - "description": "Sample for BatchWrite", - "file": "spanner_v1_generated_spanner_batch_write_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchWrite_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_write_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.batch_write", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchWrite", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BatchWrite" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_v1.types.BatchWriteRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "mutation_groups", - "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", - "shortName": "batch_write" - }, - "description": "Sample for BatchWrite", - "file": "spanner_v1_generated_spanner_batch_write_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchWrite_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_write_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.begin_transaction", - "method": { - "fullName": "google.spanner.v1.Spanner.BeginTransaction", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "options", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Transaction", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "spanner_v1_generated_spanner_begin_transaction_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_begin_transaction_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.begin_transaction", - "method": { - "fullName": "google.spanner.v1.Spanner.BeginTransaction", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" - }, - { - "name": "session", - "type": "str" - }, 
- { - "name": "options", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Transaction", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "spanner_v1_generated_spanner_begin_transaction_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_begin_transaction_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.commit", - "method": { - "fullName": "google.spanner.v1.Spanner.Commit", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CommitRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "mutations", - "type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" - }, - { - "name": "single_use_transaction", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "spanner_v1_generated_spanner_commit_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Commit_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_commit_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.commit", - "method": { - "fullName": "google.spanner.v1.Spanner.Commit", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CommitRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "mutations", - 
"type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" - }, - { - "name": "single_use_transaction", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "spanner_v1_generated_spanner_commit_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Commit_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_commit_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.create_session", - "method": { - "fullName": "google.spanner.v1.Spanner.CreateSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "CreateSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CreateSessionRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Session", - "shortName": "create_session" - }, - "description": "Sample for CreateSession", - "file": "spanner_v1_generated_spanner_create_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_CreateSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_create_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.create_session", - "method": { - "fullName": "google.spanner.v1.Spanner.CreateSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "CreateSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CreateSessionRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.spanner_v1.types.Session", - "shortName": "create_session" - }, - "description": "Sample for CreateSession", - "file": "spanner_v1_generated_spanner_create_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_CreateSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_create_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.delete_session", - "method": { - "fullName": "google.spanner.v1.Spanner.DeleteSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "DeleteSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_session" - }, - "description": "Sample for DeleteSession", - "file": "spanner_v1_generated_spanner_delete_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_DeleteSession_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_delete_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.delete_session", - "method": { - "fullName": "google.spanner.v1.Spanner.DeleteSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "DeleteSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_session" - }, - "description": "Sample for DeleteSession", - "file": "spanner_v1_generated_spanner_delete_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_DeleteSession_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 
45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_delete_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_batch_dml", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteBatchDml" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", - "shortName": "execute_batch_dml" - }, - "description": "Sample for ExecuteBatchDml", - "file": "spanner_v1_generated_spanner_execute_batch_dml_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_batch_dml_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.execute_batch_dml", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteBatchDml" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", - "shortName": "execute_batch_dml" - }, - "description": "Sample for ExecuteBatchDml", - "file": "spanner_v1_generated_spanner_execute_batch_dml_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_batch_dml_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - 
"shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "execute_sql" - }, - "description": "Sample for ExecuteSql", - "file": "spanner_v1_generated_spanner_execute_sql_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_sql_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.execute_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "execute_sql" - }, - "description": "Sample for ExecuteSql", - "file": "spanner_v1_generated_spanner_execute_sql_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_sql_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_streaming_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteStreamingSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "execute_streaming_sql" - }, - "description": "Sample for ExecuteStreamingSql", - "file": "spanner_v1_generated_spanner_execute_streaming_sql_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_streaming_sql_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.execute_streaming_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteStreamingSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "execute_streaming_sql" - }, - "description": "Sample for ExecuteStreamingSql", - "file": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.get_session", - "method": { - "fullName": "google.spanner.v1.Spanner.GetSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "GetSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.GetSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Session", - "shortName": "get_session" - }, - "description": "Sample for GetSession", - "file": 
"spanner_v1_generated_spanner_get_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_GetSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_get_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.get_session", - "method": { - "fullName": "google.spanner.v1.Spanner.GetSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "GetSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.GetSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Session", - "shortName": "get_session" - }, - "description": "Sample for GetSession", - "file": "spanner_v1_generated_spanner_get_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_GetSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_get_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.list_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.ListSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ListSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager", - "shortName": "list_sessions" - }, - "description": "Sample for ListSessions", - "file": "spanner_v1_generated_spanner_list_sessions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ListSessions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - 
"end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_list_sessions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.list_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.ListSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ListSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager", - "shortName": "list_sessions" - }, - "description": "Sample for ListSessions", - "file": "spanner_v1_generated_spanner_list_sessions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ListSessions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_list_sessions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_query", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionQuery", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_query" - }, - "description": "Sample for PartitionQuery", - "file": "spanner_v1_generated_spanner_partition_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.partition_query", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionQuery", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_query" - }, - "description": "Sample for PartitionQuery", - "file": "spanner_v1_generated_spanner_partition_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_read", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionRead" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.PartitionReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_read" - }, - "description": "Sample for PartitionRead", - "file": "spanner_v1_generated_spanner_partition_read_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionRead_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_read_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.partition_read", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionRead" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_v1.types.PartitionReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_read" - }, - "description": "Sample for PartitionRead", - "file": "spanner_v1_generated_spanner_partition_read_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionRead_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_read_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.read", - "method": { - "fullName": "google.spanner.v1.Spanner.Read", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Read" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "read" - }, - "description": "Sample for Read", - "file": "spanner_v1_generated_spanner_read_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Read_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_read_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.read", - "method": { - "fullName": "google.spanner.v1.Spanner.Read", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Read" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "read" - }, - "description": "Sample for Read", - "file": "spanner_v1_generated_spanner_read_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Read_sync", - "segments": [ - { - 
"end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_read_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.rollback", - "method": { - "fullName": "google.spanner.v1.Spanner.Rollback", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.RollbackRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "spanner_v1_generated_spanner_rollback_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Rollback_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_rollback_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.rollback", - "method": { - "fullName": "google.spanner.v1.Spanner.Rollback", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.RollbackRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "spanner_v1_generated_spanner_rollback_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Rollback_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_rollback_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.streaming_read", - "method": { - "fullName": "google.spanner.v1.Spanner.StreamingRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "StreamingRead" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "streaming_read" - }, - "description": "Sample for StreamingRead", - "file": "spanner_v1_generated_spanner_streaming_read_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_StreamingRead_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_streaming_read_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.streaming_read", - "method": { - "fullName": "google.spanner.v1.Spanner.StreamingRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "StreamingRead" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "streaming_read" - }, - "description": "Sample for StreamingRead", - "file": "spanner_v1_generated_spanner_streaming_read_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_StreamingRead_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_streaming_read_sync.py" - } - ] -} diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py deleted file mode 100644 index 49e64b4ab8..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed 
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchCreateSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BatchCreateSessions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_batch_create_sessions():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.BatchCreateSessionsRequest(
-        database="database_value",
-        session_count=1420,
-    )
-
-    # Make the request
-    response = await client.batch_create_sessions(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_BatchCreateSessions_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py
deleted file mode 100644
index ade1da3661..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchCreateSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BatchCreateSessions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_batch_create_sessions():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.BatchCreateSessionsRequest(
-        database="database_value",
-        session_count=1420,
-    )
-
-    # Make the request
-    response = client.batch_create_sessions(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_BatchCreateSessions_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py
deleted file mode 100644
index d1565657e8..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchWrite
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BatchWrite_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_batch_write():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    mutation_groups = spanner_v1.MutationGroup()
-    mutation_groups.mutations.insert.table = "table_value"
-
-    request = spanner_v1.BatchWriteRequest(
-        session="session_value",
-        mutation_groups=mutation_groups,
-    )
-
-    # Make the request
-    stream = await client.batch_write(request=request)
-
-    # Handle the response
-    async for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_BatchWrite_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
deleted file mode 100644
index 9b6621def9..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchWrite
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BatchWrite_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_batch_write():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    mutation_groups = spanner_v1.MutationGroup()
-    mutation_groups.mutations.insert.table = "table_value"
-
-    request = spanner_v1.BatchWriteRequest(
-        session="session_value",
-        mutation_groups=mutation_groups,
-    )
-
-    # Make the request
-    stream = client.batch_write(request=request)
-
-    # Handle the response
-    for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_BatchWrite_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
deleted file mode 100644
index efdd161715..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BeginTransaction
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BeginTransaction_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_begin_transaction():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.BeginTransactionRequest(
-        session="session_value",
-    )
-
-    # Make the request
-    response = await client.begin_transaction(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_BeginTransaction_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
deleted file mode 100644
index 764dab8aa2..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BeginTransaction
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BeginTransaction_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_begin_transaction():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.BeginTransactionRequest(
-        session="session_value",
-    )
-
-    # Make the request
-    response = client.begin_transaction(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_BeginTransaction_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
deleted file mode 100644
index f61c297d38..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for Commit
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_Commit_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_commit():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CommitRequest(
-        transaction_id=b'transaction_id_blob',
-        session="session_value",
-    )
-
-    # Make the request
-    response = await client.commit(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_Commit_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
deleted file mode 100644
index a945bd2234..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for Commit
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_Commit_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_commit():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CommitRequest(
-        transaction_id=b'transaction_id_blob',
-        session="session_value",
-    )
-
-    # Make the request
-    response = client.commit(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_Commit_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py
deleted file mode 100644
index 8cddc00c66..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_CreateSession_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_create_session():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CreateSessionRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = await client.create_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_CreateSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py
deleted file mode 100644
index b9de2d34e0..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_CreateSession_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_create_session():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CreateSessionRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = client.create_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_CreateSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py
deleted file mode 100644
index 9fed1ddca6..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_DeleteSession_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_delete_session():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.DeleteSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_session(request=request)
-
-
-# [END spanner_v1_generated_Spanner_DeleteSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py
deleted file mode 100644
index 1f2a17e2d1..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_DeleteSession_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_delete_session():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.DeleteSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_session(request=request)
-
-
-# [END spanner_v1_generated_Spanner_DeleteSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py
deleted file mode 100644
index 8313fd66a0..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteBatchDml
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteBatchDml_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_execute_batch_dml():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    statements = spanner_v1.Statement()
-    statements.sql = "sql_value"
-
-    request = spanner_v1.ExecuteBatchDmlRequest(
-        session="session_value",
-        statements=statements,
-        seqno=550,
-    )
-
-    # Make the request
-    response = await client.execute_batch_dml(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteBatchDml_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py
deleted file mode 100644
index dd4696b6b2..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteBatchDml
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteBatchDml_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_execute_batch_dml():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    statements = spanner_v1.Statement()
-    statements.sql = "sql_value"
-
-    request = spanner_v1.ExecuteBatchDmlRequest(
-        session="session_value",
-        statements=statements,
-        seqno=550,
-    )
-
-    # Make the request
-    response = client.execute_batch_dml(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteBatchDml_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py
deleted file mode 100644
index a12b20f3e9..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteSql_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_execute_sql():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    response = await client.execute_sql(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteSql_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py
deleted file mode 100644
index 761d0ca251..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteSql_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_execute_sql():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    response = client.execute_sql(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteSql_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py
deleted file mode 100644
index 86b8eb910e..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteStreamingSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_execute_streaming_sql():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    stream = await client.execute_streaming_sql(request=request)
-
-    # Handle the response
-    async for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py
deleted file mode 100644
index dc7dba43b8..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteStreamingSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_execute_streaming_sql():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    stream = client.execute_streaming_sql(request=request)
-
-    # Handle the response
-    for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py
deleted file mode 100644
index d2e50f9891..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_GetSession_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_get_session():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.GetSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_GetSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py
deleted file mode 100644
index 36d6436b04..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_GetSession_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_get_session():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.GetSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_GetSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py
deleted file mode 100644
index 95aa4bf818..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ListSessions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_list_sessions():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ListSessionsRequest(
-        database="database_value",
-    )
-
-    # Make the request. Note: list_sessions is a coroutine on the async
-    # client, so it must be awaited to obtain the async pager.
-    page_result = await client.list_sessions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ListSessions_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py
deleted file mode 100644
index a9533fed0d..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.

-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ListSessions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_list_sessions():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ListSessionsRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    page_result = client.list_sessions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ListSessions_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py
deleted file mode 100644
index 200fb2f6a2..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PartitionQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_PartitionQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_partition_query(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionQueryRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = await client.partition_query(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_PartitionQuery_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py deleted file mode 100644 index d486a3590c..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PartitionQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_PartitionQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_partition_query(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionQueryRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = client.partition_query(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_PartitionQuery_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py deleted file mode 100644 index 99055ade8b..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PartitionRead -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_PartitionRead_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_partition_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionReadRequest( - session="session_value", - table="table_value", - ) - - # Make the request - response = await client.partition_read(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_PartitionRead_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py deleted file mode 100644 index 0ca01ac423..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for PartitionRead -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_PartitionRead_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_partition_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionReadRequest( - session="session_value", - table="table_value", - ) - - # Make the request - response = client.partition_read(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_PartitionRead_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py deleted file mode 100644 index e555865245..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Read -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_Read_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - response = await client.read(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_Read_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py deleted file mode 100644 index 8f9ee621f3..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Read -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_Read_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - response = client.read(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_Read_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py deleted file mode 100644 index f99a1b8dd8..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_Rollback_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_rollback(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - await client.rollback(request=request) - - -# [END spanner_v1_generated_Spanner_Rollback_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py deleted file mode 100644 index 00b23b21fc..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_Rollback_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_rollback(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - client.rollback(request=request) - - -# [END spanner_v1_generated_Spanner_Rollback_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py deleted file mode 100644 index f79b9a96a1..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamingRead -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_StreamingRead_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = await client.streaming_read(request=request) - - # Handle the response - async for response in stream: - print(response) - -# [END spanner_v1_generated_Spanner_StreamingRead_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py deleted file mode 100644 index f81ed34b33..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamingRead -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_StreamingRead_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = client.streaming_read(request=request) - - # Handle the response - for response in stream: - print(response) - -# [END spanner_v1_generated_Spanner_StreamingRead_sync] diff --git a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py b/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py deleted file mode 100644 index c7f41be11e..0000000000 --- a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py +++ /dev/null @@ -1,191 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
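The fixup script being removed here rewrites old-style positional GAPIC method calls into the keyword-based `request` form, driven by the per-method parameter table it embeds. As a rough before/after sketch of the rewrite it performs (the method and argument values are illustrative, taken from the `list_sessions` entry of that table):

    # Before fixup: positional arguments mapped onto the method's first
    # parameters ('database', 'page_size', ...).
    client.list_sessions("projects/p/instances/i/databases/d", 10)

    # After fixup: a single 'request' dict; retry/timeout/metadata control
    # parameters, if present, are kept as ordinary keyword arguments.
    client.list_sessions(request={
        'database': "projects/p/instances/i/databases/d",
        'page_size': 10,
    })

Calls that already pass a `request` keyword are left untouched, which is how the script stays idempotent across repeated runs.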
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spannerCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_create_sessions': ('database', 'session_count', 'session_template', ), - 'batch_write': ('session', 'mutation_groups', 'request_options', 'exclude_txn_from_change_streams', ), - 'begin_transaction': ('session', 'options', 'request_options', 'mutation_key', ), - 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'max_commit_delay', 'request_options', 'precommit_token', ), - 'create_session': ('database', 'session', ), - 'delete_session': ('name', ), - 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', 'last_statements', ), - 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), - 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), - 'get_session': ('name', ), - 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), - 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), - 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), - 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), - 'rollback': ('session', 'transaction_id', ), - 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
-            return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=spannerCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the spanner client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also detect false
-      positives when an API method shares a name with another method.
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner/v1/setup.py b/owl-bot-staging/spanner/v1/setup.py deleted file mode 100644 index 41d728098c..0000000000 --- a/owl-bot-staging/spanner/v1/setup.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-spanner' - - -description = "Google Cloud Spanner API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/spanner/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "grpcio >= 1.33.2, < 2.0.0", - "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - 
author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt deleted file mode 100644 index 1e93c60e50..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,12 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt deleted file mode 100644 index 1e93c60e50..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt +++ /dev/null @@ -1,12 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. 
-# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt deleted file mode 100644 index 5d29dea386..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,12 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/tests/__init__.py b/owl-bot-staging/spanner/v1/tests/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py deleted file mode 100644 index b90788ab0f..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py +++ /dev/null @@ -1,11446 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient -from google.cloud.spanner_v1.services.spanner import SpannerClient -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.services.spanner import transports -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import keys -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert SpannerClient._get_default_mtls_endpoint(None) is None - assert SpannerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert SpannerClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert SpannerClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert SpannerClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - SpannerClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert SpannerClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert SpannerClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert SpannerClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - SpannerClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert SpannerClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert SpannerClient._get_client_cert_source(None, False) is None - assert SpannerClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert SpannerClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert SpannerClient._get_client_cert_source(None, True) is mock_default_cert_source - assert 
SpannerClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = SpannerClient._DEFAULT_UNIVERSE - default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert SpannerClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == SpannerClient.DEFAULT_MTLS_ENDPOINT - assert SpannerClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert SpannerClient._get_api_endpoint(None, None, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT - assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT - assert SpannerClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert SpannerClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - SpannerClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert SpannerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert SpannerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert SpannerClient._get_universe_domain(None, None) == SpannerClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - SpannerClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
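The `test__get_universe_domain` checks just above encode the resolution order the client uses: an explicit client setting wins, then the `GOOGLE_CLOUD_UNIVERSE_DOMAIN` environment variable, then the `googleapis.com` default. A minimal sketch of how a caller would pin the domain explicitly, assuming a `google-api-core` version whose `ClientOptions` accepts `universe_domain` (the domain value here is a placeholder):

    from google.api_core import client_options
    from google.cloud import spanner_v1

    # An explicit option takes precedence over GOOGLE_CLOUD_UNIVERSE_DOMAIN;
    # with neither supplied, the client falls back to googleapis.com.
    options = client_options.ClientOptions(universe_domain="example-universe.com")
    client = spanner_v1.SpannerClient(client_options=options)

Note that, as `test__get_api_endpoint` above asserts, mTLS endpoints are only supported in the default googleapis.com universe.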
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = SpannerClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = SpannerClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (SpannerClient, "grpc"), - (SpannerAsyncClient, "grpc_asyncio"), - (SpannerClient, "rest"), -]) -def test_spanner_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.SpannerGrpcTransport, "grpc"), - (transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.SpannerRestTransport, "rest"), -]) -def test_spanner_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SpannerClient, "grpc"), - (SpannerAsyncClient, "grpc_asyncio"), - (SpannerClient, "rest"), -]) -def test_spanner_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -def test_spanner_client_get_transport_class(): - transport = SpannerClient.get_transport_class() - available_transports = [ - transports.SpannerGrpcTransport, - transports.SpannerRestTransport, - ] - assert transport in available_transports - - transport = SpannerClient.get_transport_class("grpc") - assert transport == transports.SpannerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), - (SpannerClient, transports.SpannerRestTransport, "rest"), -]) -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -def test_spanner_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc", "true"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (SpannerClient, transports.SpannerGrpcTransport, "grpc", "false"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (SpannerClient, transports.SpannerRestTransport, "rest", "true"), - (SpannerClient, transports.SpannerRestTransport, "rest", "false"), -]) -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def 
test_spanner_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - SpannerClient, SpannerAsyncClient -]) -@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) -def test_spanner_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
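
The mtls_env_auto and get_mtls_endpoint_and_cert_source checks around this point all reduce to one environment-driven branch: GOOGLE_API_USE_CLIENT_CERTIFICATE gates whether a client cert source is honored at all, and GOOGLE_API_USE_MTLS_ENDPOINT decides when the mTLS endpoint is substituted. A hedged, self-contained condensation of that branch (not the library's implementation):

import os

def pick_endpoint_and_cert(default_endpoint, mtls_endpoint, client_cert_source=None):
    use_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    use_mtls = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if use_cert not in ("true", "false"):
        raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
    if use_mtls not in ("never", "auto", "always"):
        # the real client raises MutualTLSChannelError with this message
        raise ValueError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
    cert = client_cert_source if use_cert == "true" else None
    if use_mtls == "always" or (use_mtls == "auto" and cert is not None):
        return mtls_endpoint, cert
    return default_endpoint, cert
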
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - SpannerClient, SpannerAsyncClient -]) -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -def test_spanner_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = SpannerClient._DEFAULT_UNIVERSE - default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), - (SpannerClient, transports.SpannerRestTransport, "rest"), -]) -def test_spanner_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (SpannerClient, transports.SpannerRestTransport, "rest", None), -]) -def test_spanner_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_spanner_client_client_options_from_dict(): - with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = SpannerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_spanner_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - scopes=None, - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.CreateSessionRequest, - dict, -]) -def test_create_session(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - ) - response = client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.CreateSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -def test_create_session_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.CreateSessionRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
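
The non_empty_request_with_auto_populated_field tests guard the AIP-4235 contract: string fields the caller sets must survive as-is, while UUID4-annotated fields are filled in automatically when left empty. A toy illustration of that contract; the request_id field here is hypothetical, not a field of CreateSessionRequest:

import uuid

def ensure_request_id(request: dict) -> dict:
    # Fill a UUID4-annotated field only if the caller left it empty, so an
    # explicit value always survives (hypothetical field name).
    if not request.get("request_id"):
        request["request_id"] = str(uuid.uuid4())
    return request
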
- client.create_session(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CreateSessionRequest( - database='database_value', - ) - -def test_create_session_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_session] = mock_rpc - request = {} - client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_session in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_session] = mock_rpc - - request = {} - await client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_session_async(transport: str = 'grpc_asyncio', request_type=spanner.CreateSessionRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - )) - response = await client.create_session(request) - - # Establish that the underlying gRPC stub method was called. 
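
The use_cached_wrapped_rpc tests above assert that wrap_method runs once, at client construction, and that later calls reuse the cached wrapper stored in _wrapped_methods. A minimal sketch of that memoization pattern (illustrative, not the transport's actual code):

def make_wrapped_methods(stubs, wrap_method):
    # Wrap every RPC exactly once, up front; calls then look the wrapper up
    # instead of re-wrapping on each invocation.
    return {name: wrap_method(fn) for name, fn in stubs.items()}

class CachingTransport:
    def __init__(self, stubs, wrap_method):
        self._wrapped_methods = make_wrapped_methods(stubs, wrap_method)

    def call(self, name, request):
        return self._wrapped_methods[name](request)
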
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.CreateSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -@pytest.mark.asyncio -async def test_create_session_async_from_dict(): - await test_create_session_async(request_type=dict) - -def test_create_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CreateSessionRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value = spanner.Session() - client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_session_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CreateSessionRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - await client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_create_session_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_session( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - - -def test_create_session_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_session( - spanner.CreateSessionRequest(), - database='database_value', - ) - -@pytest.mark.asyncio -async def test_create_session_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_session( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_session_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_session( - spanner.CreateSessionRequest(), - database='database_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.BatchCreateSessionsRequest, - dict, -]) -def test_batch_create_sessions(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.BatchCreateSessionsResponse( - ) - response = client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.BatchCreateSessionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.BatchCreateSessionsResponse) - - -def test_batch_create_sessions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.BatchCreateSessionsRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
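
The flattened/flattened_error pairs encode one convention: a method takes either a request object or individual flattened fields, never both, and the mixed form fails fast with ValueError. Roughly (illustrative sketch; only the ValueError type is what the tests pin down):

def create_session(request=None, *, database=None):
    # A request object and flattened fields are mutually exclusive; the
    # generated surface raises rather than guessing which one wins.
    if request is not None and database is not None:
        raise ValueError("request and flattened fields are mutually exclusive")
    if request is None:
        request = {"database": database}
    return request
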
- client.batch_create_sessions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BatchCreateSessionsRequest( - database='database_value', - ) - -def test_batch_create_sessions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_create_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc - request = {} - client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.batch_create_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_create_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.batch_create_sessions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_create_sessions] = mock_rpc - - request = {} - await client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.batch_create_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_create_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchCreateSessionsRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse( - )) - response = await client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.BatchCreateSessionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.BatchCreateSessionsResponse) - - -@pytest.mark.asyncio -async def test_batch_create_sessions_async_from_dict(): - await test_batch_create_sessions_async(request_type=dict) - -def test_batch_create_sessions_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BatchCreateSessionsRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value = spanner.BatchCreateSessionsResponse() - client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_create_sessions_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BatchCreateSessionsRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) - await client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_batch_create_sessions_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.BatchCreateSessionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.batch_create_sessions( - database='database_value', - session_count=1420, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].session_count - mock_val = 1420 - assert arg == mock_val - - -def test_batch_create_sessions_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.batch_create_sessions( - spanner.BatchCreateSessionsRequest(), - database='database_value', - session_count=1420, - ) - -@pytest.mark.asyncio -async def test_batch_create_sessions_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.BatchCreateSessionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.batch_create_sessions( - database='database_value', - session_count=1420, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].session_count - mock_val = 1420 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_batch_create_sessions_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_create_sessions( - spanner.BatchCreateSessionsRequest(), - database='database_value', - session_count=1420, - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.GetSessionRequest, - dict, -]) -def test_get_session(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - ) - response = client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.GetSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -def test_get_session_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = spanner.GetSessionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_session(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.GetSessionRequest( - name='name_value', - ) - -def test_get_session_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_session] = mock_rpc - request = {} - client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_session in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_session] = mock_rpc - - request = {} - await client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_session_async(transport: str = 'grpc_asyncio', request_type=spanner.GetSessionRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - )) - response = await client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.GetSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -@pytest.mark.asyncio -async def test_get_session_async_from_dict(): - await test_get_session_async(request_type=dict) - -def test_get_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.GetSessionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - call.return_value = spanner.Session() - client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_session_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.GetSessionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - await client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_session_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_session( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
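
Every field_headers test checks the same mechanic: resource fields from the request are echoed into the x-goog-request-params metadata entry so the backend can route the call; the pager test later in this file builds the expected value with gapic_v1.routing_header.to_grpc_metadata. A standalone approximation of how that header is formed:

from urllib.parse import urlencode

def routing_metadata(**params):
    # routing_metadata(name="name_value")
    #   -> ("x-goog-request-params", "name=name_value")
    return ("x-goog-request-params", urlencode(params))
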
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_session_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_session( - spanner.GetSessionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_session_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_session( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_session_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_session( - spanner.GetSessionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.ListSessionsRequest, - dict, -]) -def test_list_sessions(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.ListSessionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ListSessionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSessionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_sessions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = spanner.ListSessionsRequest( - database='database_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_sessions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ListSessionsRequest( - database='database_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_sessions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc - request = {} - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_sessions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc - - request = {} - await client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.ListSessionsRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_sessions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner.ListSessionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSessionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_async_from_dict():
-    await test_list_sessions_async(request_type=dict)
-
-def test_list_sessions_field_headers():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.ListSessionsRequest()
-
-    request.database = 'database_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        call.return_value = spanner.ListSessionsResponse()
-        client.list_sessions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'database=database_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_field_headers_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.ListSessionsRequest()
-
-    request.database = 'database_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
-        await client.list_sessions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'database=database_value',
-    ) in kw['metadata']
-
-
-def test_list_sessions_flattened():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner.ListSessionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_sessions(
-            database='database_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-
-
-def test_list_sessions_flattened_error():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_sessions(
-            spanner.ListSessionsRequest(),
-            database='database_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_sessions(
-            database='database_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_error_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_sessions(
-            spanner.ListSessionsRequest(),
-            database='database_value',
-        )
-
-
-def test_list_sessions_pager(transport_name: str = "grpc"):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner.ListSessionsResponse(
-                sessions=[
-                    spanner.Session(),
-                    spanner.Session(),
-                    spanner.Session(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner.ListSessionsResponse(
-                sessions=[],
-                next_page_token='def',
-            ),
-            spanner.ListSessionsResponse(
-                sessions=[
-                    spanner.Session(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner.ListSessionsResponse(
-                sessions=[
-                    spanner.Session(),
-                    spanner.Session(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('database', ''),
-            )),
-        )
-        pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner.Session)
-                   for i in results)
-
-
-def test_list_sessions_pages(transport_name: str = "grpc"):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - pages = list(client.list_sessions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_sessions_async_pager(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sessions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner.Session) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_sessions_async_pages(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_sessions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner.DeleteSessionRequest, - dict, -]) -def test_delete_session(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.DeleteSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_session_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.DeleteSessionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_session(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.DeleteSessionRequest( - name='name_value', - ) - -def test_delete_session_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc - request = {} - client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_session in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_session] = mock_rpc - - request = {} - await client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_session_async(transport: str = 'grpc_asyncio', request_type=spanner.DeleteSessionRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.DeleteSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_session_async_from_dict(): - await test_delete_session_async(request_type=dict) - -def test_delete_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.DeleteSessionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - call.return_value = None - client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_session_field_headers_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.DeleteSessionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_session),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_session(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_session_flattened():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_session),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_session(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_session_flattened_error():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_session(
-            spanner.DeleteSessionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_session_flattened_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_session),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_session(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_session_flattened_error_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.delete_session( - spanner.DeleteSessionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteSqlRequest, - dict, -]) -def test_execute_sql(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = result_set.ResultSet( - ) - response = client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteSqlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -def test_execute_sql_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.execute_sql(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - -def test_execute_sql_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc - request = {} - client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.execute_sql(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_execute_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = SpannerAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.execute_sql in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.execute_sql] = mock_rpc
-
-        request = {}
-        await client.execute_sql(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.execute_sql(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_execute_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest):
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_sql),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
-        ))
-        response = await client.execute_sql(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner.ExecuteSqlRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, result_set.ResultSet)
-
-
-@pytest.mark.asyncio
-async def test_execute_sql_async_from_dict():
-    await test_execute_sql_async(request_type=dict)
-
-def test_execute_sql_field_headers():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.ExecuteSqlRequest()
-
-    request.session = 'session_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_sql),
-            '__call__') as call:
-        call.return_value = result_set.ResultSet()
-        client.execute_sql(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_execute_sql_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) - await client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteSqlRequest, - dict, -]) -def test_execute_streaming_sql(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([result_set.PartialResultSet()]) - response = client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteSqlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, result_set.PartialResultSet) - - -def test_execute_streaming_sql_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.execute_streaming_sql(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - -def test_execute_streaming_sql_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_streaming_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc - request = {} - client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.execute_streaming_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.execute_streaming_sql in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.execute_streaming_sql] = mock_rpc - - request = {} - await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.execute_streaming_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - response = await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteSqlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, result_set.PartialResultSet) - - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async_from_dict(): - await test_execute_streaming_sql_async(request_type=dict) - -def test_execute_streaming_sql_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_execute_streaming_sql_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteBatchDmlRequest, - dict, -]) -def test_execute_batch_dml(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.ExecuteBatchDmlResponse( - ) - response = client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteBatchDmlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.ExecuteBatchDmlResponse) - - -def test_execute_batch_dml_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ExecuteBatchDmlRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.execute_batch_dml(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteBatchDmlRequest( - session='session_value', - ) - -def test_execute_batch_dml_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_batch_dml in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc - request = {} - client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.execute_batch_dml(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_execute_batch_dml_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.execute_batch_dml in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.execute_batch_dml] = mock_rpc - - request = {} - await client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.execute_batch_dml(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_execute_batch_dml_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteBatchDmlRequest):
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_batch_dml),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse(
-        ))
-        response = await client.execute_batch_dml(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner.ExecuteBatchDmlRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner.ExecuteBatchDmlResponse)
-
-
-@pytest.mark.asyncio
-async def test_execute_batch_dml_async_from_dict():
-    await test_execute_batch_dml_async(request_type=dict)
-
-def test_execute_batch_dml_field_headers():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.ExecuteBatchDmlRequest()
-
-    request.session = 'session_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_batch_dml),
-            '__call__') as call:
-        call.return_value = spanner.ExecuteBatchDmlResponse()
-        client.execute_batch_dml(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'session=session_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_execute_batch_dml_field_headers_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.ExecuteBatchDmlRequest()
-
-    request.session = 'session_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_batch_dml),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse())
-        await client.execute_batch_dml(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ReadRequest, - dict, -]) -def test_read(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = result_set.ResultSet( - ) - response = client.read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -def test_read_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.read(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - -def test_read_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.read] = mock_rpc - request = {} - client.read(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.read(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = SpannerAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.read in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.read] = mock_rpc
-
-        request = {}
-        await client.read(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.read(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest):
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.read),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
-        ))
-        response = await client.read(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner.ReadRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, result_set.ResultSet)
-
-
-@pytest.mark.asyncio
-async def test_read_async_from_dict():
-    await test_read_async(request_type=dict)
-
-def test_read_field_headers():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.ReadRequest()
-
-    request.session = 'session_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.read),
-            '__call__') as call:
-        call.return_value = result_set.ResultSet()
-        client.read(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) - await client.read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ReadRequest, - dict, -]) -def test_streaming_read(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([result_set.PartialResultSet()]) - response = client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, result_set.PartialResultSet) - - -def test_streaming_read_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.streaming_read(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - -def test_streaming_read_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.streaming_read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc - request = {} - client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.streaming_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_streaming_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.streaming_read in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.streaming_read] = mock_rpc - - request = {} - await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.streaming_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_streaming_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - response = await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.ReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, result_set.PartialResultSet) - - -@pytest.mark.asyncio -async def test_streaming_read_async_from_dict(): - await test_streaming_read_async(request_type=dict) - -def test_streaming_read_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_streaming_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.BeginTransactionRequest, - dict, -]) -def test_begin_transaction(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transaction.Transaction( - id=b'id_blob', - ) - response = client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, transaction.Transaction) - assert response.id == b'id_blob' - - -def test_begin_transaction_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.BeginTransactionRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.begin_transaction(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BeginTransactionRequest( - session='session_value', - ) - -def test_begin_transaction_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_begin_transaction_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.begin_transaction in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.begin_transaction] = mock_rpc - - request = {} - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. 
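- # The first call must flow through the cached wrapper to the mock exactly - # once; the second call below must hit the same cache entry, so wrap_method - # is never invoked again and wrapper_fn.call_count stays at zero.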
- assert mock_rpc.call_count == 1 - - await client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=spanner.BeginTransactionRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction( - id=b'id_blob', - )) - response = await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, transaction.Transaction) - assert response.id == b'id_blob' - - -@pytest.mark.asyncio -async def test_begin_transaction_async_from_dict(): - await test_begin_transaction_async(request_type=dict) - -def test_begin_transaction_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BeginTransactionRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value = transaction.Transaction() - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_begin_transaction_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BeginTransactionRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction()) - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
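- # The routing header travels in the metadata kwarg of the stub call as a - # ('x-goog-request-params', 'session=session_value') tuple derived from the - # session field set on the request above.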
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -def test_begin_transaction_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transaction.Transaction() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.begin_transaction( - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].options - mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)) - assert arg == mock_val - - -def test_begin_transaction_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - spanner.BeginTransactionRequest(), - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.begin_transaction( - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].options - mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_begin_transaction_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
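- # The rejection is purely client-side: the transport is never touched, and - # the ValueError below is raised before any RPC is attempted.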
- with pytest.raises(ValueError): - await client.begin_transaction( - spanner.BeginTransactionRequest(), - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.CommitRequest, - dict, -]) -def test_commit(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse( - ) - response = client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, commit_response.CommitResponse) - - -def test_commit_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.CommitRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.commit(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CommitRequest( - session='session_value', - ) - -def test_commit_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.commit in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.commit] = mock_rpc - request = {} - client.commit(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.commit(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.commit in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.commit] = mock_rpc - - request = {} - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.commit(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_commit_async(transport: str = 'grpc_asyncio', request_type=spanner.CommitRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse( - )) - response = await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, commit_response.CommitResponse) - - -@pytest.mark.asyncio -async def test_commit_async_from_dict(): - await test_commit_async(request_type=dict) - -def test_commit_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CommitRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = commit_response.CommitResponse() - client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_commit_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CommitRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -def test_commit_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.commit( - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].mutations - mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] - assert arg == mock_val - assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)) - - -def test_commit_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - spanner.CommitRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - -@pytest.mark.asyncio -async def test_commit_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. 
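- # The async client awaits the transport call, so the designated return value - # must be awaitable; FakeUnaryUnaryCall wraps the CommitResponse accordingly, - # unlike the bare message used in the sync flattened test above.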
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.commit( - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].mutations - mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] - assert arg == mock_val - assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)) - -@pytest.mark.asyncio -async def test_commit_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.commit( - spanner.CommitRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.RollbackRequest, - dict, -]) -def test_rollback(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.RollbackRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_rollback_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.RollbackRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.rollback(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.RollbackRequest( - session='session_value', - ) - -def test_rollback_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rollback in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rollback] = mock_rpc - request = {} - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.rollback in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.rollback] = mock_rpc - - request = {} - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=spanner.RollbackRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
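- # Rollback carries no payload: the RPC's Empty response surfaces as None in - # Python, which is why the fake call above wraps None and the final check - # below asserts that the response is None.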
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.RollbackRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_rollback_async_from_dict(): - await test_rollback_async(request_type=dict) - -def test_rollback_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.RollbackRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = None - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rollback_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.RollbackRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -def test_rollback_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rollback( - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].transaction_id - mock_val = b'transaction_id_blob' - assert arg == mock_val - - -def test_rollback_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.rollback( - spanner.RollbackRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - ) - -@pytest.mark.asyncio -async def test_rollback_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rollback( - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].transaction_id - mock_val = b'transaction_id_blob' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rollback_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rollback( - spanner.RollbackRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.PartitionQueryRequest, - dict, -]) -def test_partition_query(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.PartitionResponse( - ) - response = client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.PartitionQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.PartitionResponse) - - -def test_partition_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.PartitionQueryRequest( - session='session_value', - sql='sql_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.partition_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionQueryRequest( - session='session_value', - sql='sql_value', - ) - -def test_partition_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc - request = {} - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.partition_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_partition_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.partition_query in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.partition_query] = mock_rpc - - request = {} - await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.partition_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionQueryRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( - )) - response = await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.PartitionQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.PartitionResponse) - - -@pytest.mark.asyncio -async def test_partition_query_async_from_dict(): - await test_partition_query_async(request_type=dict) - -def test_partition_query_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionQueryRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = spanner.PartitionResponse() - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_partition_query_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionQueryRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) - await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.PartitionReadRequest, - dict, -]) -def test_partition_read(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.PartitionResponse( - ) - response = client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.PartitionReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
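- # PartitionRead and PartitionQuery share the same spanner.PartitionResponse - # message, so this type check mirrors the partition_query test above.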
- assert isinstance(response, spanner.PartitionResponse) - - -def test_partition_read_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.PartitionReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.partition_read(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - -def test_partition_read_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc - request = {} - client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.partition_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_partition_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.partition_read in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.partition_read] = mock_rpc - - request = {} - await client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.partition_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_partition_read_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionReadRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( - )) - response = await client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.PartitionReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.PartitionResponse) - - -@pytest.mark.asyncio -async def test_partition_read_async_from_dict(): - await test_partition_read_async(request_type=dict) - -def test_partition_read_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - call.return_value = spanner.PartitionResponse() - client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_partition_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) - await client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.BatchWriteRequest, - dict, -]) -def test_batch_write(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([spanner.BatchWriteResponse()]) - response = client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, spanner.BatchWriteResponse) - - -def test_batch_write_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.BatchWriteRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_write(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BatchWriteRequest( - session='session_value', - ) - -def test_batch_write_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_write in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc - request = {} - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_write_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.batch_write in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_write] = mock_rpc - - request = {} - await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_write_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchWriteRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()]) - response = await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, spanner.BatchWriteResponse) - - -@pytest.mark.asyncio -async def test_batch_write_async_from_dict(): - await test_batch_write_async(request_type=dict) - -def test_batch_write_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BatchWriteRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - call.return_value = iter([spanner.BatchWriteResponse()]) - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -def test_batch_write_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([spanner.BatchWriteResponse()]) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.batch_write( - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].mutation_groups - mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])] - assert arg == mock_val - - -def test_batch_write_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_write( - spanner.BatchWriteRequest(), - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - -@pytest.mark.asyncio -async def test_batch_write_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method.
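- # batch_write is a server-streaming method, so the awaited call returns the - # UnaryStreamCall mock itself rather than a single message; the assertions - # below therefore inspect only the request side of the call.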
- response = await client.batch_write( - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].mutation_groups - mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_batch_write_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_write( - spanner.BatchWriteRequest(), - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - - -def test_create_session_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_session] = mock_rpc - - request = {} - client.create_session(request) - - # Establish that the underlying gRPC stub method was called. 
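# The *_use_cached_wrapped_rpc tests here all verify the same invariant: each
# RPC is wrapped exactly once at client construction, and later calls reuse
# the cached wrapper. A dependency-free toy illustration of that pattern; the
# class and names below are made up, not the transport's real internals:
class ToyTransport:
    def __init__(self, methods):
        self.wrap_calls = 0
        # Wrap every method once, up front, and cache the wrappers.
        self._wrapped_methods = {
            name: self._wrap(fn) for name, fn in methods.items()
        }

    def _wrap(self, fn):
        self.wrap_calls += 1
        def wrapped(*args, **kwargs):
            return fn(*args, **kwargs)
        return wrapped

transport = ToyTransport({"create_session": lambda request: "session"})
before = transport.wrap_calls
assert transport._wrapped_methods["create_session"]({}) == "session"
transport._wrapped_methods["create_session"]({})
assert transport.wrap_calls == before  # no re-wrapping on subsequent calls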
- assert mock_rpc.call_count == 1 - - client.create_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_session_rest_required_fields(request_type=spanner.CreateSessionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.Session() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_session(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_session_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_session._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "session", ))) - - -def test_create_session_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
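# The required-fields test above leans on MessageToJson dropping proto3
# fields that are still at their default values, which is why the test must
# then re-add required fields by hand. A standalone sketch of that first
# step (assumes spanner_v1 and protobuf are installed):
import json
from google.protobuf import json_format
from google.cloud import spanner_v1

request = spanner_v1.CreateSessionRequest(database="")
jsonified = json.loads(
    json_format.MessageToJson(spanner_v1.CreateSessionRequest.pb(request))
)
assert "database" not in jsonified  # default-valued required field is dropped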
- return_value = spanner.Session() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_session(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1]) - - -def test_create_session_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_session( - spanner.CreateSessionRequest(), - database='database_value', - ) - - -def test_batch_create_sessions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_create_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc - - request = {} - client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. 
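# The flattened REST test above validates the final request URL against the
# method's HTTP-rule path template. The same check, standalone, using the
# real google.api_core helper (host and resource IDs below are made up):
from google.api_core import path_template

template = "https://example.com/v1/{database=projects/*/instances/*/databases/*}/sessions"
url = "https://example.com/v1/projects/p1/instances/i1/databases/d1/sessions"
assert path_template.validate(template, url)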
- assert mock_rpc.call_count == 1 - - client.batch_create_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_create_sessions_rest_required_fields(request_type=spanner.BatchCreateSessionsRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["database"] = "" - request_init["session_count"] = 0 - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - jsonified_request["sessionCount"] = 1420 - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - assert "sessionCount" in jsonified_request - assert jsonified_request["sessionCount"] == 1420 - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.BatchCreateSessionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.batch_create_sessions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_create_sessions_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.batch_create_sessions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "sessionCount", ))) - - -def test_batch_create_sessions_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.BatchCreateSessionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - session_count=1420, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.batch_create_sessions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate" % client.transport._host, args[1]) - - -def test_batch_create_sessions_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
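# The *_unset_required_fields assertions nearby reduce to simple set algebra:
# the fields the transport still expects from the caller are the method's
# optional query parameters intersected with its required fields. A
# plain-Python restatement of two cases exercised in this file:
assert set(()) & {"database", "sessionCount"} == set()              # batch_create_sessions
assert {"filter", "pageSize", "pageToken"} & {"database"} == set()  # list_sessions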
- with pytest.raises(ValueError): - client.batch_create_sessions( - spanner.BatchCreateSessionsRequest(), - database='database_value', - session_count=1420, - ) - - -def test_get_session_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_session] = mock_rpc - - request = {} - client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_session_rest_required_fields(request_type=spanner.GetSessionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.Session() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_session(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_session_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_session._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_session_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.Session() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_session(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1]) - - -def test_get_session_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_session( - spanner.GetSessionRequest(), - name='name_value', - ) - - -def test_list_sessions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc - - request = {} - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_sessions_rest_required_fields(request_type=spanner.ListSessionsRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.ListSessionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_sessions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_sessions_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_sessions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("database", ))) - - -def test_list_sessions_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.ListSessionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_sessions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1]) - - -def test_list_sessions_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_sessions( - spanner.ListSessionsRequest(), - database='database_value', - ) - - -def test_list_sessions_rest_pager(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(spanner.ListSessionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - pager = client.list_sessions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner.Session) - for i in results) - - pages = list(client.list_sessions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_session_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc - - request = {} - client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. 
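# The pager test above walks pages until next_page_token is empty. A compact,
# dependency-free sketch of that contract; fetch is a stand-in for the
# transport call, not the real API:
def iterate_pages(fetch):
    token = ""
    while True:
        page = fetch(token)
        yield page
        token = page["next_page_token"]
        if not token:  # an empty token marks the last page
            return

pages = iter([
    {"sessions": ["s1", "s2", "s3"], "next_page_token": "abc"},
    {"sessions": [], "next_page_token": "def"},
    {"sessions": ["s4"], "next_page_token": "ghi"},
    {"sessions": ["s5", "s6"], "next_page_token": ""},
])
fetched = list(iterate_pages(lambda token: next(pages)))
assert [p["next_page_token"] for p in fetched] == ["abc", "def", "ghi", ""]
assert sum(len(p["sessions"]) for p in fetched) == 6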
- assert mock_rpc.call_count == 1 - - client.delete_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_session_rest_required_fields(request_type=spanner.DeleteSessionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_session(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_session_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_session._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_session_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_session(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1]) - - -def test_delete_session_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_session( - spanner.DeleteSessionRequest(), - name='name_value', - ) - - -def test_execute_sql_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc - - request = {} - client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.execute_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_execute_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["sql"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["sql"] = 'sql_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "sql" in jsonified_request - assert jsonified_request["sql"] == 'sql_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.ResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.execute_sql(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_execute_sql_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.execute_sql._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "sql", ))) - - -def test_execute_streaming_sql_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_streaming_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc - - request = {} - client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.execute_streaming_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_execute_streaming_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["sql"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["sql"] = 'sql_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "sql" in jsonified_request - assert jsonified_request["sql"] == 'sql_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.PartialResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.execute_streaming_sql(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_execute_streaming_sql_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.execute_streaming_sql._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "sql", ))) - - -def test_execute_batch_dml_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_batch_dml in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc - - request = {} - client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. 
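# The server-streaming REST tests above frame each message as a one-element
# JSON array ("[{...}]") and replay the body through iter_content. A minimal
# sketch of reassembling and decoding such a framed stream; the payload and
# the character-by-character iteration are illustrative only:
import json

framed = "[{}]".format(json.dumps({"chunkedValue": True}))
body = "".join(iter(framed))  # iter_content yields the body piece by piece
assert json.loads(body) == [{"chunkedValue": True}]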
- assert mock_rpc.call_count == 1 - - client.execute_batch_dml(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_execute_batch_dml_rest_required_fields(request_type=spanner.ExecuteBatchDmlRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["seqno"] = 0 - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["seqno"] = 550 - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "seqno" in jsonified_request - assert jsonified_request["seqno"] == 550 - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.ExecuteBatchDmlResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.execute_batch_dml(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_execute_batch_dml_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.execute_batch_dml._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "transaction", "statements", "seqno", ))) - - -def test_read_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.read] = mock_rpc - - request = {} - client.read(request) - - # Establish that the underlying gRPC stub method was called. 
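# The execute_batch_dml tests above treat session, transaction, statements,
# and seqno as the required quartet. A sketch of a minimally populated
# request built from public spanner_v1 types (all field values below are
# illustrative assumptions, not taken from the tests):
from google.cloud import spanner_v1

request = spanner_v1.ExecuteBatchDmlRequest(
    session="session_value",
    transaction=spanner_v1.TransactionSelector(
        begin=spanner_v1.TransactionOptions(
            read_write=spanner_v1.TransactionOptions.ReadWrite()
        )
    ),
    statements=[
        spanner_v1.ExecuteBatchDmlRequest.Statement(sql="UPDATE t SET c = c + 1")
    ],
    seqno=550,
)
assert request.seqno == 550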
- assert mock_rpc.call_count == 1 - - client.read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_read_rest_required_fields(request_type=spanner.ReadRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["table"] = "" - request_init["columns"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["table"] = 'table_value' - jsonified_request["columns"] = 'columns_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "table" in jsonified_request - assert jsonified_request["table"] == 'table_value' - assert "columns" in jsonified_request - assert jsonified_request["columns"] == 'columns_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.ResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.read(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_read_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.read._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", ))) - - -def test_streaming_read_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.streaming_read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc - - request = {} - client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.streaming_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_streaming_read_rest_required_fields(request_type=spanner.ReadRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["table"] = "" - request_init["columns"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["table"] = 'table_value' - jsonified_request["columns"] = 'columns_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "table" in jsonified_request - assert jsonified_request["table"] == 'table_value' - assert "columns" in jsonified_request - assert jsonified_request["columns"] == 'columns_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.PartialResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.streaming_read(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_streaming_read_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.streaming_read._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", ))) - - -def test_begin_transaction_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc - - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_begin_transaction_rest_required_fields(request_type=spanner.BeginTransactionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = transaction.Transaction() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.begin_transaction(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_begin_transaction_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.begin_transaction._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "options", ))) - - -def test_begin_transaction_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transaction.Transaction() - - # get arguments that satisfy an http rule for this method - sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.begin_transaction(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction" % client.transport._host, args[1]) - - -def test_begin_transaction_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - spanner.BeginTransactionRequest(), - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - -def test_commit_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.commit in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.commit] = mock_rpc - - request = {} - client.commit(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.commit(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_commit_rest_required_fields(request_type=spanner.CommitRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = commit_response.CommitResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.commit(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_commit_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.commit._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", ))) - - -def test_commit_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
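- # No real HTTP traffic happens in this test: the transport session's
- # request() is patched, and the CommitResponse proto is serialized to
- # JSON below much as the REST layer would receive it on the wire.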
- return_value = commit_response.CommitResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - session='session_value', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.commit(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit" % client.transport._host, args[1]) - - -def test_commit_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - spanner.CommitRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - -def test_rollback_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rollback in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rollback] = mock_rpc - - request = {} - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rollback_rest_required_fields(request_type=spanner.RollbackRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["transaction_id"] = b'' - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["transactionId"] = b'transaction_id_blob' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "transactionId" in jsonified_request - assert jsonified_request["transactionId"] == b'transaction_id_blob' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rollback(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rollback_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rollback._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "transactionId", ))) - - -def test_rollback_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
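- # Rollback has no payload to fake (the RPC maps to google.protobuf.Empty),
- # so the mocked HTTP response below carries an empty body and the client
- # surfaces the call as returning None.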
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - session='session_value', - transaction_id=b'transaction_id_blob', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rollback(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback" % client.transport._host, args[1]) - - -def test_rollback_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - spanner.RollbackRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - -def test_partition_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc - - request = {} - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.partition_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_partition_query_rest_required_fields(request_type=spanner.PartitionQueryRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["sql"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["sql"] = 'sql_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "sql" in jsonified_request - assert jsonified_request["sql"] == 'sql_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.PartitionResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
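- # (path_template.transcode() would normally match the request against the
- # method's http rule and split it into 'uri', 'method', 'body' and
- # 'query_params'; the canned transcode_result below short-circuits that
- # matching with a static 'v1/sample_method' POST.)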
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.partition_query(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_partition_query_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.partition_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "sql", ))) - - -def test_partition_read_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc - - request = {} - client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.partition_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_partition_read_rest_required_fields(request_type=spanner.PartitionReadRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["table"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["table"] = 'table_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "table" in jsonified_request - assert jsonified_request["table"] == 'table_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.PartitionResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.partition_read(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_partition_read_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.partition_read._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "table", "keySet", ))) - - -def test_batch_write_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_write in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc - - request = {} - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_write_rest_required_fields(request_type=spanner.BatchWriteRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.BatchWriteResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
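- # BatchWrite is a server-streaming RPC over REST, which is why the
- # serialized response further below is wrapped in a JSON array
- # ("[{}]".format(...)) and consumed through a mocked iter_content():
- # streamed messages arrive as elements of a single JSON array.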
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.batch_write(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_write_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.batch_write._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "mutationGroups", ))) - - -def test_batch_write_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.BatchWriteResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - client.batch_write(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite" % client.transport._host, args[1]) - - -def test_batch_write_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
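- # The client validates this before any transport call is made, so no HTTP
- # mocking is needed here: mixing a request message with flattened keyword
- # arguments raises ValueError immediately.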
- with pytest.raises(ValueError): - client.batch_write( - spanner.BatchWriteRequest(), - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SpannerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SpannerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SpannerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SpannerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = SpannerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SpannerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.SpannerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.SpannerGrpcTransport, - transports.SpannerGrpcAsyncIOTransport, - transports.SpannerRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = SpannerClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
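- # Calling with request=None makes the client construct the default,
- # all-empty request message, which is what the assertion at the end of
- # each of these tests compares against.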
-def test_create_session_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value = spanner.Session() - client.create_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.CreateSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_create_sessions_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value = spanner.BatchCreateSessionsResponse() - client.batch_create_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BatchCreateSessionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_session_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - call.return_value = spanner.Session() - client.get_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.GetSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_sessions_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value = spanner.ListSessionsResponse() - client.list_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ListSessionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_session_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - call.return_value = None - client.delete_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.DeleteSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_execute_sql_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - call.return_value = result_set.ResultSet() - client.execute_sql(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteSqlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_execute_streaming_sql_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.execute_streaming_sql(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteSqlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_execute_batch_dml_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - call.return_value = spanner.ExecuteBatchDmlResponse() - client.execute_batch_dml(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteBatchDmlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_read_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - call.return_value = result_set.ResultSet() - client.read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_streaming_read_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.streaming_read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_begin_transaction_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value = transaction.Transaction() - client.begin_transaction(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BeginTransactionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_commit_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = commit_response.CommitResponse() - client.commit(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.CommitRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rollback_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = None - client.rollback(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.RollbackRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_query_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = spanner.PartitionResponse() - client.partition_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_read_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - call.return_value = spanner.PartitionResponse() - client.partition_read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_batch_write_empty_call_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - call.return_value = iter([spanner.BatchWriteResponse()]) - client.batch_write(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BatchWriteRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = SpannerAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_session_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - )) - await client.create_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.CreateSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_create_sessions_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse( - )) - await client.batch_create_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BatchCreateSessionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_session_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. 
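- # (grpc_helpers_async.FakeUnaryUnaryCall wraps the message so the mocked
- # stub behaves like a real async unary call: awaiting it yields the
- # designated response.)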
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - )) - await client.get_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.GetSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_sessions_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ListSessionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_session_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.DeleteSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_execute_sql_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( - )) - await client.execute_sql(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteSqlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_execute_streaming_sql_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - # Designate an appropriate return value for the call. 
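- # Server-streaming calls are faked differently from unary ones: an
- # autospec'd aio.UnaryStreamCall, whose read() is an AsyncMock yielding a
- # single PartialResultSet, stands in for the response stream.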
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.execute_streaming_sql(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteSqlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_execute_batch_dml_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse( - )) - await client.execute_batch_dml(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteBatchDmlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_read_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( - )) - await client.read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_streaming_read_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.streaming_read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_begin_transaction_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction( - id=b'id_blob', - )) - await client.begin_transaction(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BeginTransactionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_commit_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse( - )) - await client.commit(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.CommitRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rollback_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.RollbackRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_partition_query_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( - )) - await client.partition_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_partition_read_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( - )) - await client.partition_read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_write_empty_call_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()]) - await client.batch_write(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BatchWriteRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = SpannerClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_session_rest_bad_request(request_type=spanner.CreateSessionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_session(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.CreateSessionRequest, - dict, -]) -def test_create_session_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
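- # The message designated here is serialized to JSON (MessageToJson below)
- # and handed to the mocked HTTP session; the transport parses it back
- # into a spanner.Session, which the assertions check field by field.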
- return_value = spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_session(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_session_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_create_session") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_create_session_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_create_session") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.CreateSessionRequest.pb(spanner.CreateSessionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.Session.to_json(spanner.Session()) - req.return_value.content = return_value - - request = spanner.CreateSessionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.Session() - post_with_metadata.return_value = spanner.Session(), metadata - - client.create_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_batch_create_sessions_rest_bad_request(request_type=spanner.BatchCreateSessionsRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
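- # api_core translates the mocked 400 status into
- # core_exceptions.BadRequest, which pytest.raises captures below.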
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.batch_create_sessions(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.BatchCreateSessionsRequest, - dict, -]) -def test_batch_create_sessions_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.BatchCreateSessionsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.batch_create_sessions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.BatchCreateSessionsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_create_sessions_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_create_sessions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.BatchCreateSessionsRequest.pb(spanner.BatchCreateSessionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.BatchCreateSessionsResponse.to_json(spanner.BatchCreateSessionsResponse()) - req.return_value.content = return_value - - request = spanner.BatchCreateSessionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.BatchCreateSessionsResponse() - post_with_metadata.return_value = spanner.BatchCreateSessionsResponse(), metadata - - 
client.batch_create_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_session_rest_bad_request(request_type=spanner.GetSessionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_session(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.GetSessionRequest, - dict, -]) -def test_get_session_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_session(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_session_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_get_session") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_get_session_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_get_session") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.GetSessionRequest.pb(spanner.GetSessionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.Session.to_json(spanner.Session()) - req.return_value.content = return_value - - request = spanner.GetSessionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.Session() - post_with_metadata.return_value = spanner.Session(), metadata - - client.get_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_sessions_rest_bad_request(request_type=spanner.ListSessionsRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_sessions(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.ListSessionsRequest, - dict, -]) -def test_list_sessions_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
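- # ListSessions is a paginated RPC: the raw ListSessionsResponse designated
- # here is wrapped in a pagers.ListSessionsPager, which re-exposes
- # next_page_token (asserted below) and drives further page fetches.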
- return_value = spanner.ListSessionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.ListSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_sessions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSessionsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_sessions_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_list_sessions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.ListSessionsRequest.pb(spanner.ListSessionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.ListSessionsResponse.to_json(spanner.ListSessionsResponse()) - req.return_value.content = return_value - - request = spanner.ListSessionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.ListSessionsResponse() - post_with_metadata.return_value = spanner.ListSessionsResponse(), metadata - - client.list_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_session_rest_bad_request(request_type=spanner.DeleteSessionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_session(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.DeleteSessionRequest, - dict, -]) -def test_delete_session_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_session(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_session_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_delete_session") as pre: - pre.assert_not_called() - pb_message = spanner.DeleteSessionRequest.pb(spanner.DeleteSessionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner.DeleteSessionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_execute_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.execute_sql(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteSqlRequest, - dict, -]) -def test_execute_sql_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = result_set.ResultSet( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.execute_sql(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_execute_sql_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_sql") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = result_set.ResultSet.to_json(result_set.ResultSet()) - req.return_value.content = return_value - - request = spanner.ExecuteSqlRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = result_set.ResultSet() - post_with_metadata.return_value = result_set.ResultSet(), metadata - - client.execute_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_execute_streaming_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.execute_streaming_sql(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteSqlRequest, - dict, -]) -def test_execute_streaming_sql_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = result_set.PartialResultSet( - chunked_value=True, - resume_token=b'resume_token_blob', - last=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.execute_streaming_sql(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
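- # Over REST, server streaming arrives as a JSON array consumed through
- # iter_content; wrapping the payload in "[{}]" above simulated a stream
- # carrying a single PartialResultSet element, which next(response) pulled.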
- assert isinstance(response, result_set.PartialResultSet) - assert response.chunked_value is True - assert response.resume_token == b'resume_token_blob' - assert response.last is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_execute_streaming_sql_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_streaming_sql") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet()) - req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) - - request = spanner.ExecuteSqlRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = result_set.PartialResultSet() - post_with_metadata.return_value = result_set.PartialResultSet(), metadata - - client.execute_streaming_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_execute_batch_dml_rest_bad_request(request_type=spanner.ExecuteBatchDmlRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.execute_batch_dml(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteBatchDmlRequest, - dict, -]) -def test_execute_batch_dml_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.ExecuteBatchDmlResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.execute_batch_dml(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.ExecuteBatchDmlResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_execute_batch_dml_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_batch_dml") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.ExecuteBatchDmlRequest.pb(spanner.ExecuteBatchDmlRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.ExecuteBatchDmlResponse.to_json(spanner.ExecuteBatchDmlResponse()) - req.return_value.content = return_value - - request = spanner.ExecuteBatchDmlRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.ExecuteBatchDmlResponse() - post_with_metadata.return_value = spanner.ExecuteBatchDmlResponse(), metadata - - client.execute_batch_dml(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_read_rest_bad_request(request_type=spanner.ReadRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.read(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.ReadRequest, - dict, -]) -def test_read_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = result_set.ResultSet( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.read(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_read_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_read") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_read_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_read") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.ReadRequest.pb(spanner.ReadRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = result_set.ResultSet.to_json(result_set.ResultSet()) - req.return_value.content = return_value - - request = spanner.ReadRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = result_set.ResultSet() - post_with_metadata.return_value = result_set.ResultSet(), metadata - - client.read(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_streaming_read_rest_bad_request(request_type=spanner.ReadRequest): - client = 
SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.streaming_read(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.ReadRequest, - dict, -]) -def test_streaming_read_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = result_set.PartialResultSet( - chunked_value=True, - resume_token=b'resume_token_blob', - last=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.streaming_read(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, result_set.PartialResultSet) - assert response.chunked_value is True - assert response.resume_token == b'resume_token_blob' - assert response.last is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_streaming_read_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_streaming_read") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.ReadRequest.pb(spanner.ReadRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet()) - req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) - - request = spanner.ReadRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = result_set.PartialResultSet() - post_with_metadata.return_value = result_set.PartialResultSet(), metadata - - client.streaming_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_begin_transaction_rest_bad_request(request_type=spanner.BeginTransactionRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.begin_transaction(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.BeginTransactionRequest, - dict, -]) -def test_begin_transaction_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transaction.Transaction( - id=b'id_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.begin_transaction(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, transaction.Transaction) - assert response.id == b'id_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_begin_transaction_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_begin_transaction") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.BeginTransactionRequest.pb(spanner.BeginTransactionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = transaction.Transaction.to_json(transaction.Transaction()) - req.return_value.content = return_value - - request = spanner.BeginTransactionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = transaction.Transaction() - post_with_metadata.return_value = transaction.Transaction(), metadata - - client.begin_transaction(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_commit_rest_bad_request(request_type=spanner.CommitRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.commit(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.CommitRequest, - dict, -]) -def test_commit_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = commit_response.CommitResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.commit(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, commit_response.CommitResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_commit_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_commit") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_commit_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_commit") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.CommitRequest.pb(spanner.CommitRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = commit_response.CommitResponse.to_json(commit_response.CommitResponse()) - req.return_value.content = return_value - - request = spanner.CommitRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = commit_response.CommitResponse() - post_with_metadata.return_value = commit_response.CommitResponse(), metadata - - client.commit(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - 
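-# (Editorial sketch, not part of the generated file.) The interceptor tests
-# above stub all three hooks per RPC; a real interceptor subclasses
-# transports.SpannerRestInterceptor and overrides only what it needs. A
-# minimal example, assuming only the hook signatures exercised by these
-# tests (pre_commit returns a (request, metadata) tuple, post_commit
-# returns the response), with a hypothetical "x-audit" header:
-class AuditingInterceptor(transports.SpannerRestInterceptor):
-    def pre_commit(self, request, metadata):
-        # Append a hypothetical audit header before the HTTP call is made.
-        return request, list(metadata) + [("x-audit", "commit")]
-
-    def post_commit(self, response):
-        # Inspect the decoded CommitResponse before it reaches the caller.
-        return response
-
-transport = transports.SpannerRestTransport(
-    credentials=ga_credentials.AnonymousCredentials(),
-    interceptor=AuditingInterceptor(),
-)
-client = SpannerClient(transport=transport)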
-def test_rollback_rest_bad_request(request_type=spanner.RollbackRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rollback(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.RollbackRequest, - dict, -]) -def test_rollback_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.rollback(request) - - # Establish that the response is the type that we expect. 
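- # Rollback returns google.protobuf.Empty on the wire, so the client
- # surfaces it as None; the empty-string body above decodes to exactly that.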
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_rollback") as pre: - pre.assert_not_called() - pb_message = spanner.RollbackRequest.pb(spanner.RollbackRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner.RollbackRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.rollback(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_partition_query_rest_bad_request(request_type=spanner.PartitionQueryRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.partition_query(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.PartitionQueryRequest, - dict, -]) -def test_partition_query_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.PartitionResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.partition_query(request) - - # Establish that the response is the type that we expect. 
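- # PartitionQuery and PartitionRead both return spanner.PartitionResponse;
- # the default message serialized above round-trips to an equal default
- # instance, so only the type is asserted.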
- assert isinstance(response, spanner.PartitionResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_partition_query_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_query") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.PartitionQueryRequest.pb(spanner.PartitionQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse()) - req.return_value.content = return_value - - request = spanner.PartitionQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.PartitionResponse() - post_with_metadata.return_value = spanner.PartitionResponse(), metadata - - client.partition_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_partition_read_rest_bad_request(request_type=spanner.PartitionReadRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.partition_read(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.PartitionReadRequest, - dict, -]) -def test_partition_read_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = spanner.PartitionResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.PartitionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.partition_read(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.PartitionResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_partition_read_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_read") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.PartitionReadRequest.pb(spanner.PartitionReadRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse()) - req.return_value.content = return_value - - request = spanner.PartitionReadRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.PartitionResponse() - post_with_metadata.return_value = spanner.PartitionResponse(), metadata - - client.partition_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_batch_write_rest_bad_request(request_type=spanner.BatchWriteRequest): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.batch_write(request) - - -@pytest.mark.parametrize("request_type", [ - spanner.BatchWriteRequest, - dict, -]) -def test_batch_write_rest_call_success(request_type): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.BatchWriteResponse( - indexes=[752], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.BatchWriteResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.batch_write(request) - - assert isinstance(response, Iterable) - response = next(response) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.BatchWriteResponse) - assert response.indexes == [752] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_write_rest_interceptors(null_interceptor): - transport = transports.SpannerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), - ) - client = SpannerClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write") as post, \ - mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_write") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner.BatchWriteRequest.pb(spanner.BatchWriteRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner.BatchWriteResponse.to_json(spanner.BatchWriteResponse()) - req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) - - request = spanner.BatchWriteRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner.BatchWriteResponse() - post_with_metadata.return_value = spanner.BatchWriteResponse(), metadata - - client.batch_write(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_session_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - client.create_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.CreateSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_create_sessions_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - client.batch_create_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BatchCreateSessionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_get_session_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - client.get_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.GetSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_sessions_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - client.list_sessions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ListSessionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_session_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - client.delete_session(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.DeleteSessionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_execute_sql_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - client.execute_sql(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteSqlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_execute_streaming_sql_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - client.execute_streaming_sql(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteSqlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_execute_batch_dml_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - client.execute_batch_dml(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ExecuteBatchDmlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_read_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - client.read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_streaming_read_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - client.streaming_read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.ReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_begin_transaction_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - client.begin_transaction(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BeginTransactionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_commit_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - client.commit(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.CommitRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rollback_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - client.rollback(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.RollbackRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_query_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - client.partition_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_read_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - client.partition_read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_write_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - client.batch_write(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BatchWriteRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SpannerGrpcTransport, - ) - -def test_spanner_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SpannerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_spanner_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.SpannerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_session', - 'batch_create_sessions', - 'get_session', - 'list_sessions', - 'delete_session', - 'execute_sql', - 'execute_streaming_sql', - 'execute_batch_dml', - 'read', - 'streaming_read', - 'begin_transaction', - 'commit', - 'rollback', - 'partition_query', - 'partition_read', - 'batch_write', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_spanner_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SpannerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - quota_project_id="octopus", - ) - - -def test_spanner_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SpannerTransport() - adc.assert_called_once() - - -def test_spanner_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SpannerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SpannerGrpcTransport, - transports.SpannerGrpcAsyncIOTransport, - ], -) -def test_spanner_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.data',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SpannerGrpcTransport, - transports.SpannerGrpcAsyncIOTransport, - transports.SpannerRestTransport, - ], -) -def test_spanner_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.SpannerGrpcTransport, grpc_helpers), - (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_spanner_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_spanner_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.SpannerRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_spanner_host_no_port(transport_name): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_spanner_host_with_port(transport_name): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_spanner_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = SpannerClient( - credentials=creds1, - transport=transport_name, - ) - client2 = SpannerClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_session._session - session2 = client2.transport.create_session._session - assert session1 != session2 - session1 = client1.transport.batch_create_sessions._session - session2 = client2.transport.batch_create_sessions._session - assert session1 != session2 - session1 = client1.transport.get_session._session - session2 = client2.transport.get_session._session - assert session1 != session2 - session1 = client1.transport.list_sessions._session - session2 = client2.transport.list_sessions._session - assert session1 != session2 - session1 = client1.transport.delete_session._session - session2 = client2.transport.delete_session._session - assert session1 != session2 - session1 = client1.transport.execute_sql._session - session2 = client2.transport.execute_sql._session - assert session1 != session2 - session1 = client1.transport.execute_streaming_sql._session - session2 = client2.transport.execute_streaming_sql._session - assert session1 != session2 - session1 = client1.transport.execute_batch_dml._session - session2 = client2.transport.execute_batch_dml._session - assert session1 != session2 - session1 = client1.transport.read._session - session2 = client2.transport.read._session - assert session1 != session2 - session1 = 
client1.transport.streaming_read._session - session2 = client2.transport.streaming_read._session - assert session1 != session2 - session1 = client1.transport.begin_transaction._session - session2 = client2.transport.begin_transaction._session - assert session1 != session2 - session1 = client1.transport.commit._session - session2 = client2.transport.commit._session - assert session1 != session2 - session1 = client1.transport.rollback._session - session2 = client2.transport.rollback._session - assert session1 != session2 - session1 = client1.transport.partition_query._session - session2 = client2.transport.partition_query._session - assert session1 != session2 - session1 = client1.transport.partition_read._session - session2 = client2.transport.partition_read._session - assert session1 != session2 - session1 = client1.transport.batch_write._session - session2 = client2.transport.batch_write._session - assert session1 != session2 -def test_spanner_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SpannerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_spanner_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SpannerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_database_path(): - project = "squid" - instance = "clam" - database = "whelk" - expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - actual = SpannerClient.database_path(project, instance, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "octopus", - "instance": "oyster", - "database": "nudibranch", - } - path = SpannerClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_database_path(path) - assert expected == actual - -def test_session_path(): - project = "cuttlefish" - instance = "mussel" - database = "winkle" - session = "nautilus" - expected = "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) - actual = SpannerClient.session_path(project, instance, database, session) - assert expected == actual - - -def test_parse_session_path(): - expected = { - "project": "scallop", - "instance": "abalone", - "database": "squid", - "session": "clam", - } - path = SpannerClient.session_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_session_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = SpannerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = SpannerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = SpannerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = SpannerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = SpannerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = SpannerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = SpannerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = SpannerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = SpannerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = SpannerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = SpannerClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: - transport_class = SpannerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (SpannerClient, transports.SpannerGrpcTransport), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/spanner_admin_database/v1/.coveragerc b/owl-bot-staging/spanner_admin_database/v1/.coveragerc deleted file mode 100644 index 6085d54da4..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner_admin_database/__init__.py - google/cloud/spanner_admin_database/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/spanner_admin_database/v1/.flake8 b/owl-bot-staging/spanner_admin_database/v1/.flake8 deleted file mode 100644 index 90316de214..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/spanner_admin_database/v1/LICENSE b/owl-bot-staging/spanner_admin_database/v1/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
deleted file mode 100644
index dae249ec89..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-include README.rst LICENSE
-recursive-include google *.py *.pyi *.json *.proto py.typed
-recursive-include tests *
-global-exclude *.py[co]
-global-exclude __pycache__
diff --git a/owl-bot-staging/spanner_admin_database/v1/README.rst b/owl-bot-staging/spanner_admin_database/v1/README.rst
deleted file mode 100644
index 5cfc4ab969..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Spanner Admin Database API
-==========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Spanner Admin Database API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code; a minimal sketch follows this list.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
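-
-For example, here is a minimal sketch of opting back into propagation. The :code:`google`
-logger name is the documented scope above; using :code:`logging.basicConfig` to give the
-root logger a handler is an assumption of this example, not a library default:
-
-.. code-block:: python
-
-    import logging
-
-    # Assumed for the example: give the root logger a handler so that
-    # propagated records are emitted somewhere visible.
-    logging.basicConfig(level=logging.DEBUG)
-
-    # Events from the "google" logger are not propagated to the root
-    # logger by default; opt back in explicitly.
-    logging.getLogger("google").propagate = True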
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css deleted file mode 100644 index b0a295464b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css +++ /dev/null @@ -1,20 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} - -/* Ensure minimum width for 'Parameters' / 'Returns' column */ -dl.field-list > dt { - min-width: 100px -} - -/* Insert space between methods for readability */ -dl.method { - padding-top: 10px; - padding-bottom: 10px -} - -/* Insert empty space between classes */ -dl.class { - padding-bottom: 50px -} diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html deleted file mode 100644 index 95e9c77fcf..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
- <div class="document"> - {{ sidebar() }} - {%- block document %} - <div class="documentwrapper">
- {%- if render_sidebar %} - <div class="bodywrapper">
- {%- endif %} - - {%- block relbar_top %} - {%- if theme_show_relbar_top|tobool %} - <div class="related top"> - &nbsp; - {{- rellink_markup () }} - </div> - {%- endif %} - {% endblock %} - -
- <div class="body" role="main">
- <div class="admonition" id="python2-eol"> - As of January 1, 2020 this library no longer supports Python 2 on the latest released version. - Library versions released prior to that date will continue to be available. For more information please - visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>. - </div>
- {% block body %} {% endblock %} - </div>
- - {%- block relbar_bottom %} - {%- if theme_show_relbar_bottom|tobool %} - <div class="related bottom"> - &nbsp; - {{- rellink_markup () }} - </div> - {%- endif %} - {% endblock %} - - {%- if render_sidebar %} - </div>
- {%- endif %} - </div>
- {%- endblock %} - </div>
- </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py deleted file mode 100644 index 4b16cc5e97..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-spanner-admin-database documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-spanner-admin-database" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-database", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-spanner-admin-database-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-spanner-admin-database.tex", - u"google-cloud-spanner-admin-database Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-spanner-admin-database", - "google-cloud-spanner-admin-database Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-spanner-admin-database", - "google-cloud-spanner-admin-database Documentation", - author, - "google-cloud-spanner-admin-database", - "google-cloud-spanner-admin-database Library", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("https://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), - "grpc": ("https://grpc.github.io/grpc/python/", None), - "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst deleted file mode 100644 index 6f79e3f3d4..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. include:: multiprocessing.rst - - -API Reference ------------- -.. toctree:: - :maxdepth: 2 - - spanner_admin_database_v1/services_ - spanner_admin_database_v1/types_ diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst deleted file mode 100644 index 536d17b2ea..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses the :mod:`grpc` library, it is safe to - share instances across threads. In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.pool.Pool` or - :class:`multiprocessing.Process`.
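A minimal sketch of the pattern that note recommends, assuming the synchronous :code:`DatabaseAdminClient` from this package and hypothetical project and instance names:

.. code-block:: python

    import multiprocessing

    from google.cloud import spanner_admin_database_v1

    def list_database_names(instance_name):
        # Create the client *inside* the worker, i.e. after os.fork() has
        # already happened, so each process owns its own gRPC channel.
        client = spanner_admin_database_v1.DatabaseAdminClient()
        return [database.name for database in client.list_databases(parent=instance_name)]

    if __name__ == "__main__":
        instances = [
            "projects/my-project/instances/instance-a",  # hypothetical resource names
            "projects/my-project/instances/instance-b",
        ]
        with multiprocessing.Pool(processes=2) as pool:
            for names in pool.map(list_database_names, instances):
                print(names)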
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst deleted file mode 100644 index bd6aab00e4..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -DatabaseAdmin -------------------------------- - -.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst deleted file mode 100644 index 55e57d8dc0..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner Admin Database v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - database_admin diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst deleted file mode 100644 index fe6c27778b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Spanner Admin Database v1 API -==================================================== - -.. automodule:: google.cloud.spanner_admin_database_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py deleted file mode 100644 index aa445b5622..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner_admin_database import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.spanner_admin_database_v1.services.database_admin.client import DatabaseAdminClient -from google.cloud.spanner_admin_database_v1.services.database_admin.async_client import DatabaseAdminAsyncClient - -from google.cloud.spanner_admin_database_v1.types.backup import Backup -from google.cloud.spanner_admin_database_v1.types.backup import BackupInfo -from google.cloud.spanner_admin_database_v1.types.backup import BackupInstancePartition -from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupMetadata -from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupMetadata -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import DeleteBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import FullBackupSpec -from google.cloud.spanner_admin_database_v1.types.backup import GetBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import IncrementalBackupSpec -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsRequest -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsResponse -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsRequest -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsResponse -from google.cloud.spanner_admin_database_v1.types.backup import UpdateBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupSchedule -from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupScheduleSpec -from google.cloud.spanner_admin_database_v1.types.backup_schedule import CreateBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import CrontabSpec -from google.cloud.spanner_admin_database_v1.types.backup_schedule import DeleteBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import GetBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesResponse -from google.cloud.spanner_admin_database_v1.types.backup_schedule import UpdateBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.common import EncryptionConfig -from google.cloud.spanner_admin_database_v1.types.common import EncryptionInfo -from google.cloud.spanner_admin_database_v1.types.common import OperationProgress -from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseRequest -from 
google.cloud.spanner_admin_database_v1.types.spanner_database_admin import Database -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DatabaseRole -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DdlStatementActionInfo -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DropDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import OptimizeRestoredDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreInfo -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import SplitPoints -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreSourceType - -__all__ = ('DatabaseAdminClient', - 'DatabaseAdminAsyncClient', - 'Backup', - 'BackupInfo', - 'BackupInstancePartition', - 'CopyBackupEncryptionConfig', - 'CopyBackupMetadata', - 'CopyBackupRequest', - 'CreateBackupEncryptionConfig', - 'CreateBackupMetadata', - 'CreateBackupRequest', - 'DeleteBackupRequest', - 'FullBackupSpec', - 'GetBackupRequest', - 'IncrementalBackupSpec', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'UpdateBackupRequest', - 'BackupSchedule', - 'BackupScheduleSpec', - 'CreateBackupScheduleRequest', - 'CrontabSpec', - 'DeleteBackupScheduleRequest', - 'GetBackupScheduleRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'UpdateBackupScheduleRequest', - 'EncryptionConfig', - 'EncryptionInfo', - 'OperationProgress', - 
'DatabaseDialect', - 'AddSplitPointsRequest', - 'AddSplitPointsResponse', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'Database', - 'DatabaseRole', - 'DdlStatementActionInfo', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'GetDatabaseRequest', - 'InternalUpdateGraphOperationRequest', - 'InternalUpdateGraphOperationResponse', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'ListDatabaseRolesRequest', - 'ListDatabaseRolesResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'OptimizeRestoredDatabaseMetadata', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'RestoreDatabaseRequest', - 'RestoreInfo', - 'SplitPoints', - 'UpdateDatabaseDdlMetadata', - 'UpdateDatabaseDdlRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseRequest', - 'RestoreSourceType', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed deleted file mode 100644 index 29f334aad6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py deleted file mode 100644 index 6605eff74d..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py +++ /dev/null @@ -1,150 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner_admin_database_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.database_admin import DatabaseAdminClient -from .services.database_admin import DatabaseAdminAsyncClient - -from .types.backup import Backup -from .types.backup import BackupInfo -from .types.backup import BackupInstancePartition -from .types.backup import CopyBackupEncryptionConfig -from .types.backup import CopyBackupMetadata -from .types.backup import CopyBackupRequest -from .types.backup import CreateBackupEncryptionConfig -from .types.backup import CreateBackupMetadata -from .types.backup import CreateBackupRequest -from .types.backup import DeleteBackupRequest -from .types.backup import FullBackupSpec -from .types.backup import GetBackupRequest -from .types.backup import IncrementalBackupSpec -from .types.backup import ListBackupOperationsRequest -from .types.backup import ListBackupOperationsResponse -from .types.backup import ListBackupsRequest -from .types.backup import ListBackupsResponse -from .types.backup import UpdateBackupRequest -from .types.backup_schedule import BackupSchedule -from .types.backup_schedule import BackupScheduleSpec -from .types.backup_schedule import CreateBackupScheduleRequest -from .types.backup_schedule import CrontabSpec -from .types.backup_schedule import DeleteBackupScheduleRequest -from .types.backup_schedule import GetBackupScheduleRequest -from .types.backup_schedule import ListBackupSchedulesRequest -from .types.backup_schedule import ListBackupSchedulesResponse -from .types.backup_schedule import UpdateBackupScheduleRequest -from .types.common import EncryptionConfig -from .types.common import EncryptionInfo -from .types.common import OperationProgress -from .types.common import DatabaseDialect -from .types.spanner_database_admin import AddSplitPointsRequest -from .types.spanner_database_admin import AddSplitPointsResponse -from .types.spanner_database_admin import CreateDatabaseMetadata -from .types.spanner_database_admin import CreateDatabaseRequest -from .types.spanner_database_admin import Database -from .types.spanner_database_admin import DatabaseRole -from .types.spanner_database_admin import DdlStatementActionInfo -from .types.spanner_database_admin import DropDatabaseRequest -from .types.spanner_database_admin import GetDatabaseDdlRequest -from .types.spanner_database_admin import GetDatabaseDdlResponse -from .types.spanner_database_admin import GetDatabaseRequest -from .types.spanner_database_admin import InternalUpdateGraphOperationRequest -from .types.spanner_database_admin import InternalUpdateGraphOperationResponse -from .types.spanner_database_admin import ListDatabaseOperationsRequest -from .types.spanner_database_admin import ListDatabaseOperationsResponse -from .types.spanner_database_admin import ListDatabaseRolesRequest -from .types.spanner_database_admin import ListDatabaseRolesResponse -from .types.spanner_database_admin import ListDatabasesRequest -from .types.spanner_database_admin import ListDatabasesResponse -from .types.spanner_database_admin import OptimizeRestoredDatabaseMetadata -from .types.spanner_database_admin import RestoreDatabaseEncryptionConfig -from .types.spanner_database_admin import RestoreDatabaseMetadata -from .types.spanner_database_admin import RestoreDatabaseRequest -from .types.spanner_database_admin import RestoreInfo -from .types.spanner_database_admin import SplitPoints -from .types.spanner_database_admin import UpdateDatabaseDdlMetadata -from 
.types.spanner_database_admin import UpdateDatabaseDdlRequest -from .types.spanner_database_admin import UpdateDatabaseMetadata -from .types.spanner_database_admin import UpdateDatabaseRequest -from .types.spanner_database_admin import RestoreSourceType - -__all__ = ( - 'DatabaseAdminAsyncClient', -'AddSplitPointsRequest', -'AddSplitPointsResponse', -'Backup', -'BackupInfo', -'BackupInstancePartition', -'BackupSchedule', -'BackupScheduleSpec', -'CopyBackupEncryptionConfig', -'CopyBackupMetadata', -'CopyBackupRequest', -'CreateBackupEncryptionConfig', -'CreateBackupMetadata', -'CreateBackupRequest', -'CreateBackupScheduleRequest', -'CreateDatabaseMetadata', -'CreateDatabaseRequest', -'CrontabSpec', -'Database', -'DatabaseAdminClient', -'DatabaseDialect', -'DatabaseRole', -'DdlStatementActionInfo', -'DeleteBackupRequest', -'DeleteBackupScheduleRequest', -'DropDatabaseRequest', -'EncryptionConfig', -'EncryptionInfo', -'FullBackupSpec', -'GetBackupRequest', -'GetBackupScheduleRequest', -'GetDatabaseDdlRequest', -'GetDatabaseDdlResponse', -'GetDatabaseRequest', -'IncrementalBackupSpec', -'InternalUpdateGraphOperationRequest', -'InternalUpdateGraphOperationResponse', -'ListBackupOperationsRequest', -'ListBackupOperationsResponse', -'ListBackupSchedulesRequest', -'ListBackupSchedulesResponse', -'ListBackupsRequest', -'ListBackupsResponse', -'ListDatabaseOperationsRequest', -'ListDatabaseOperationsResponse', -'ListDatabaseRolesRequest', -'ListDatabaseRolesResponse', -'ListDatabasesRequest', -'ListDatabasesResponse', -'OperationProgress', -'OptimizeRestoredDatabaseMetadata', -'RestoreDatabaseEncryptionConfig', -'RestoreDatabaseMetadata', -'RestoreDatabaseRequest', -'RestoreInfo', -'RestoreSourceType', -'SplitPoints', -'UpdateBackupRequest', -'UpdateBackupScheduleRequest', -'UpdateDatabaseDdlMetadata', -'UpdateDatabaseDdlRequest', -'UpdateDatabaseMetadata', -'UpdateDatabaseRequest', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json deleted file mode 100644 index 027a4f612b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json +++ /dev/null @@ -1,433 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.spanner_admin_database_v1", - "protoPackage": "google.spanner.admin.database.v1", - "schema": "1.0", - "services": { - "DatabaseAdmin": { - "clients": { - "grpc": { - "libraryClient": "DatabaseAdminClient", - "rpcs": { - "AddSplitPoints": { - "methods": [ - "add_split_points" - ] - }, - "CopyBackup": { - "methods": [ - "copy_backup" - ] - }, - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DropDatabase": { - "methods": [ - "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - 
"InternalUpdateGraphOperation": { - "methods": [ - "internal_update_graph_operation" - ] - }, - "ListBackupOperations": { - "methods": [ - "list_backup_operations" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabaseRoles": { - "methods": [ - "list_database_roles" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DatabaseAdminAsyncClient", - "rpcs": { - "AddSplitPoints": { - "methods": [ - "add_split_points" - ] - }, - "CopyBackup": { - "methods": [ - "copy_backup" - ] - }, - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DropDatabase": { - "methods": [ - "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "InternalUpdateGraphOperation": { - "methods": [ - "internal_update_graph_operation" - ] - }, - "ListBackupOperations": { - "methods": [ - "list_backup_operations" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabaseRoles": { - "methods": [ - "list_database_roles" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - }, - "rest": { - "libraryClient": "DatabaseAdminClient", - "rpcs": { - "AddSplitPoints": { - "methods": [ - "add_split_points" - ] - }, - "CopyBackup": { - "methods": [ - "copy_backup" - ] - }, - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DropDatabase": { - "methods": [ 
- "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "InternalUpdateGraphOperation": { - "methods": [ - "internal_update_graph_operation" - ] - }, - "ListBackupOperations": { - "methods": [ - "list_backup_operations" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabaseRoles": { - "methods": [ - "list_database_roles" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed deleted file mode 100644 index 29f334aad6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py deleted file mode 100644 index 85e225bbd8..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DatabaseAdminClient -from .async_client import DatabaseAdminAsyncClient - -__all__ = ( - 'DatabaseAdminClient', - 'DatabaseAdminAsyncClient', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py deleted file mode 100644 index b5e733e3a6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ /dev/null @@ -1,3968 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import uuid - -from google.cloud.spanner_admin_database_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import common -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport -from .client import DatabaseAdminClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class DatabaseAdminAsyncClient: - """Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - """ - - _client: DatabaseAdminClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
- DEFAULT_ENDPOINT = DatabaseAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DatabaseAdminClient._DEFAULT_UNIVERSE - - backup_path = staticmethod(DatabaseAdminClient.backup_path) - parse_backup_path = staticmethod(DatabaseAdminClient.parse_backup_path) - backup_schedule_path = staticmethod(DatabaseAdminClient.backup_schedule_path) - parse_backup_schedule_path = staticmethod(DatabaseAdminClient.parse_backup_schedule_path) - crypto_key_path = staticmethod(DatabaseAdminClient.crypto_key_path) - parse_crypto_key_path = staticmethod(DatabaseAdminClient.parse_crypto_key_path) - crypto_key_version_path = staticmethod(DatabaseAdminClient.crypto_key_version_path) - parse_crypto_key_version_path = staticmethod(DatabaseAdminClient.parse_crypto_key_version_path) - database_path = staticmethod(DatabaseAdminClient.database_path) - parse_database_path = staticmethod(DatabaseAdminClient.parse_database_path) - database_role_path = staticmethod(DatabaseAdminClient.database_role_path) - parse_database_role_path = staticmethod(DatabaseAdminClient.parse_database_role_path) - instance_path = staticmethod(DatabaseAdminClient.instance_path) - parse_instance_path = staticmethod(DatabaseAdminClient.parse_instance_path) - instance_partition_path = staticmethod(DatabaseAdminClient.instance_partition_path) - parse_instance_partition_path = staticmethod(DatabaseAdminClient.parse_instance_partition_path) - common_billing_account_path = staticmethod(DatabaseAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DatabaseAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DatabaseAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(DatabaseAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(DatabaseAdminClient.common_organization_path) - parse_common_organization_path = staticmethod(DatabaseAdminClient.parse_common_organization_path) - common_project_path = staticmethod(DatabaseAdminClient.common_project_path) - parse_common_project_path = staticmethod(DatabaseAdminClient.parse_common_project_path) - common_location_path = staticmethod(DatabaseAdminClient.common_location_path) - parse_common_location_path = staticmethod(DatabaseAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminAsyncClient: The constructed client. - """ - return DatabaseAdminClient.from_service_account_info.__func__(DatabaseAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminAsyncClient: The constructed client. 
- """ - return DatabaseAdminClient.from_service_account_file.__func__(DatabaseAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DatabaseAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DatabaseAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatabaseAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = DatabaseAdminClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the database admin async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DatabaseAdminTransport constructor. - If set to None, a transport is chosen automatically. 
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which has one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = DatabaseAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner.admin.database_v1.DatabaseAdminAsyncClient`.", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "credentialsType": None, - } - ) - - async def list_databases(self, - request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabasesAsyncPager: - r"""Lists Cloud Spanner databases. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_databases(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_databases(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]]): - The request object. The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - parent (:class:`str`): - Required. The instance whose databases should be listed. - Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager: - The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.ListDatabasesRequest): - request = spanner_database_admin.ListDatabasesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method.
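Before the pager construction that follows, a brief usage sketch may help. It is illustrative only: the project and instance IDs are placeholders, and it assumes default credentials are available in the environment. Awaiting ``list_databases`` yields the pager itself; iterating it fetches further pages on demand.

.. code-block:: python

    # Sketch only; "my-project" and "my-instance" are placeholder IDs.
    import asyncio

    from google.cloud import spanner_admin_database_v1

    async def main():
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        # The awaited call returns a ListDatabasesAsyncPager, not a single
        # response; `async for` pulls additional pages transparently.
        pager = await client.list_databases(
            parent="projects/my-project/instances/my-instance",
        )
        async for database in pager:
            print(database.name)

    asyncio.run(main())

The pager's ``pages`` property can be iterated instead when whole ``ListDatabasesResponse`` messages (for example, their page tokens) are of interest.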
- response = pagers.ListDatabasesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_database(self, - request: Optional[Union[spanner_database_admin.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - create_statement: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``<database_name>/operations/<operation_id>`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_create_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateDatabaseRequest( - parent="parent_value", - create_statement="create_statement_value", - ) - - # Make the request - operation = await client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]]): - The request object. The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - parent (:class:`str`): - Required. The name of the instance that will serve the - new database. Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - create_statement (:class:`str`): - Required. A ``CREATE DATABASE`` statement, which - specifies the ID of the new database. The database ID - must conform to the regular expression - ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 - characters in length. If the database ID is a reserved - word or if it contains a hyphen, the database ID must be - enclosed in backticks (:literal:`\``). - - This corresponds to the ``create_statement`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, create_statement] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): - request = spanner_database_admin.CreateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if create_statement is not None: - request.create_statement = create_statement - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def get_database(self, - request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.Database: - r"""Gets the state of a Cloud Spanner database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_get_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]]): - The request object. The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - name (:class:`str`): - Required. The name of the requested database. Values are - of the form - ``projects/<project>/instances/<instance>/databases/<database>``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Database: - A Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.GetDatabaseRequest): - request = spanner_database_admin.GetDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
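The flattened-parameter check above enforces the general GAPIC calling convention: pass either a ``request`` object (or an equivalent dict) or the flattened fields, never both. A minimal sketch of the two equivalent styles, assuming an existing ``DatabaseAdminAsyncClient`` named ``client`` and a placeholder database name:

.. code-block:: python

    # Sketch; `client` is an existing DatabaseAdminAsyncClient.
    from google.cloud import spanner_admin_database_v1

    DB = "projects/my-project/instances/my-instance/databases/my-db"

    async def show_calling_styles(client):
        # Style 1: an explicit request object (a dict is also accepted).
        request = spanner_admin_database_v1.GetDatabaseRequest(name=DB)
        database = await client.get_database(request=request)

        # Style 2: the flattened `name` field directly.
        database = await client.get_database(name=DB)

        # Passing both `request` and `name` raises ValueError.
        return database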
- return response - - async def update_database(self, - request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[spanner_database_admin.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a Cloud Spanner database. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the database. If the named database - does not exist, returns ``NOT_FOUND``. - - While the operation is pending: - - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field is set to true. - - Cancelling the operation is best-effort. If the cancellation - succeeds, the operation metadata's - [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] - is set, the updates are reverted, and the operation terminates - with a ``CANCELLED`` status. - - New UpdateDatabase requests will return a - ``FAILED_PRECONDITION`` error until the pending operation is - done (returns successfully or with error). - - Reading the database via the API continues to give the - pre-request values. - - Upon completion of the returned operation: - - - The new values are in effect and readable via the API. - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field becomes false. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>`` - and can be used to track the database modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_update_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - database = spanner_admin_database_v1.Database() - database.name = "name_value" - - request = spanner_admin_database_v1.UpdateDatabaseRequest( - database=database, - ) - - # Make the request - operation = await client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]]): - The request object. The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - database (:class:`google.cloud.spanner_admin_database_v1.types.Database`): - Required. The database to update.
The ``name`` field of - the database is of the form - ``projects/<project>/instances/<instance>/databases/<database>``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to update. Currently, only - the ``enable_drop_protection`` field can be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest): - request = spanner_database_admin.UpdateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.UpdateDatabaseMetadata, - ) - - # Done; return the response.
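Because the response is wrapped by ``operation_async.from_gapic`` just above, callers receive an ``AsyncOperation`` and await its ``result()`` for the final ``Database``. A sketch under the assumption of an existing async client; the helper name and resource string are illustrative:

.. code-block:: python

    # Sketch; `client` is an existing DatabaseAdminAsyncClient.
    from google.protobuf import field_mask_pb2

    from google.cloud import spanner_admin_database_v1

    async def enable_drop_protection(client, database_name):
        database = spanner_admin_database_v1.Database(
            name=database_name,
            enable_drop_protection=True,
        )
        operation = await client.update_database(
            database=database,
            update_mask=field_mask_pb2.FieldMask(paths=["enable_drop_protection"]),
        )
        # result() polls the long-running operation until it completes,
        # raising if it finished in error or was cancelled.
        return await operation.result()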
- return response - - async def update_database_ddl(self, - request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None, - *, - database: Optional[str] = None, - statements: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``<database_name>/operations/<operation_id>`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = await client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]]): - The request object. Enqueues the given DDL statements to be applied, in - order but not necessarily all at once, to the database - schema at some point (or points) in the future. The - server checks that the statements are executable - (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - ``NULL`` value in a column to which ``NOT NULL`` would - be added). If a statement fails, all subsequent - statements in the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - database (:class:`str`): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - statements (:class:`MutableSequence[str]`): - Required. DDL statements to be - applied to the database. - - This corresponds to the ``statements`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, statements] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): - request = spanner_database_admin.UpdateDatabaseDdlRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if statements: - request.statements.extend(statements) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, - ) - - # Done; return the response. - return response - - async def drop_database(self, - request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_drop_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DropDatabaseRequest( - database="database_value", - ) - - # Make the request - await client.drop_database(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]]): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - database (:class:`str`): - Required. The database to be dropped. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.DropDatabaseRequest): - request = spanner_database_admin.DropDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.drop_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_database_ddl(self, - request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - r"""Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. 
This method does not show pending - schema updates; those may be queried using the - [Operations][google.longrunning.Operations] API. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_get_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseDdlRequest( - database="database_value", - ) - - # Make the request - response = await client.get_database_ddl(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]]): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - database (:class:`str`): - Required. The database whose schema we wish to get. - Values are of the form - ``projects/<project>/instances/<instance>/databases/<database>`` - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): - request = spanner_database_admin.GetDatabaseDdlRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request.
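The routing-header step above is what yields the ``x-goog-request-params`` metadata entry Spanner uses to route the request; it is derived from the request fields, never supplied by the caller. A standalone illustration of what the helper produces (the database name is a placeholder):

.. code-block:: python

    from google.api_core import gapic_v1

    entry = gapic_v1.routing_header.to_grpc_metadata(
        (("database", "projects/my-project/instances/my-instance/databases/my-db"),)
    )
    print(entry)
    # ('x-goog-request-params',
    #  'database=projects%2Fmy-project%2Finstances%2Fmy-instance%2Fdatabases%2Fmy-db')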
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
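Policies returned by ``get_iam_policy`` carry an ``etag``, so the customary pattern is read-modify-write: fetch the policy, adjust its bindings locally, and write it back, letting the service reject the write if the policy changed in between. A sketch, assuming the caller holds the relevant ``setIamPolicy`` permission; the role and member values are placeholders:

.. code-block:: python

    # Sketch; `client` is an existing DatabaseAdminAsyncClient.
    from google.iam.v1 import iam_policy_pb2

    async def add_database_reader(client, resource, member):
        policy = await client.get_iam_policy(
            request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
        )
        # The etag carried on `policy` makes the subsequent write
        # conditional on the policy not having changed concurrently.
        binding = policy.bindings.add()
        binding.role = "roles/spanner.databaseReader"
        binding.members.append(member)
        return await client.set_iam_policy(
            request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
        )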
- - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - permissions: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`MutableSequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource, permissions] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here.
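Before the header construction that follows, note that ``test_iam_permissions`` simply echoes back the subset of the supplied permissions the caller actually holds, which makes it a cheap preflight probe. A sketch with placeholder permission names:

.. code-block:: python

    # Sketch; `client` is an existing DatabaseAdminAsyncClient.
    from google.iam.v1 import iam_policy_pb2

    async def can_manage_backups(client, resource):
        wanted = ["spanner.backups.create", "spanner.backups.delete"]
        response = await client.test_iam_permissions(
            request=iam_policy_pb2.TestIamPermissionsRequest(
                resource=resource,
                permissions=wanted,
            )
        )
        # Only the granted permissions are echoed back.
        return set(wanted) <= set(response.permissions)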
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_backup(self, - request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup: Optional[gsad_backup.Backup] = None, - backup_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_create_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - ) - - # Make the request - operation = await client.create_backup(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]]): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - parent (:class:`str`): - Required. The name of the instance in which the backup - will be created. This must be the same instance that - contains the database the backup will be created from. - The backup will be stored in the location(s) specified - in the instance configuration of this instance. Values - are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): - Required. The backup to create. - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (:class:`str`): - Required. The id of the backup to be created.
The - ``backup_id`` appended to ``parent`` forms the full - backup name of the form - ``projects/<project>/instances/<instance>/backups/<backup>``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup, backup_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup.CreateBackupRequest): - request = gsad_backup.CreateBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup is not None: - request.backup = backup - if backup_id is not None: - request.backup_id = backup_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gsad_backup.Backup, - metadata_type=gsad_backup.CreateBackupMetadata, - ) - - # Done; return the response. - return response - - async def copy_backup(self, - request: Optional[Union[backup.CopyBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_id: Optional[str] = None, - source_backup: Optional[str] = None, - expire_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts copying a Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`` - and can be used to track copying of the backup.
The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_copy_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CopyBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - ) - - # Make the request - operation = client.copy_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]]): - The request object. The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - parent (:class:`str`): - Required. The name of the destination instance that will - contain the backup copy. Values are of the form: - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (:class:`str`): - Required. The id of the backup copy. The ``backup_id`` - appended to ``parent`` forms the full backup_uri of the - form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source_backup (:class:`str`): - Required. The source backup to be copied. The source - backup needs to be in READY state for it to be copied. - Once CopyBackup is in progress, the source backup cannot - be deleted or cleaned up on expiration until CopyBackup - is finished. Values are of the form: - ``projects//instances//backups/``. - - This corresponds to the ``source_backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - expire_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Required. The expiration time of the backup in - microsecond granularity. The expiration time must be at - least 6 hours and at most 366 days from the - ``create_time`` of the source backup. Once the - ``expire_time`` has passed, the backup is eligible to be - automatically deleted by Cloud Spanner to free the - resources used by the backup. - - This corresponds to the ``expire_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_id, source_backup, expire_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.CopyBackupRequest): - request = backup.CopyBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_id is not None: - request.backup_id = backup_id - if source_backup is not None: - request.source_backup = source_backup - if expire_time is not None: - request.expire_time = expire_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.copy_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backup.Backup, - metadata_type=backup.CopyBackupMetadata, - ) - - # Done; return the response. - return response - - async def get_backup(self, - request: Optional[Union[backup.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup.Backup: - r"""Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_get_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]]): - The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - name (:class:`str`): - Required. Name of the backup. Values are of the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.GetBackupRequest): - request = backup.GetBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
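-        # A minimal flattened-argument sketch (illustrative only; the resource
-        # name is hypothetical, and default credentials are assumed):
-        #
-        #     backup_msg = await client.get_backup(
-        #         name="projects/my-project/instances/my-instance/backups/my-backup",
-        #     )
-        #     print(backup_msg.expire_time)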
- return response - - async def update_backup(self, - request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None, - *, - backup: Optional[gsad_backup.Backup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup.Backup: - r"""Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_update_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupRequest( - ) - - # Make the request - response = await client.update_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]]): - The request object. The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only - supported for the following fields: - - - ``backup.expire_time``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be - updated. This mask is relative to the Backup resource, - not to the request message. The field mask must always - be specified; this prevents any future fields from being - erased accidentally by clients that do not know about - them. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
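-        # Illustrative call shapes (assumed, mirroring the guard below): passing
-        # update_backup(request=req, backup=b) raises ValueError, while
-        # update_backup(backup=b, update_mask=mask) populates the request here.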
- flattened_params = [backup, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup.UpdateBackupRequest): - request = gsad_backup.UpdateBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup(self, - request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_delete_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]]): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - name (:class:`str`): - Required. Name of the backup to delete. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
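-
-        A minimal flattened-argument sketch (illustrative only; the backup
-        name below is hypothetical):
-
-        .. code-block:: python
-
-            await client.delete_backup(
-                name="projects/my-project/instances/my-instance/backups/my-backup",
-            )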
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.DeleteBackupRequest): - request = backup.DeleteBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_backups(self, - request: Optional[Union[backup.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupsAsyncPager: - r"""Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]]): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - parent (:class:`str`): - Required. The instance to list backups from. Values are - of the form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.ListBackupsRequest): - request = backup.ListBackupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def restore_database(self, - request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database_id: Optional[str] = None, - backup: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. 
Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_restore_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.RestoreDatabaseRequest( - backup="backup_value", - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]]): - The request object. The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - parent (:class:`str`): - Required. The name of the instance in which to create - the restored database. This instance must be in the same - project and have the same instance configuration as the - instance containing the source backup. Values are of the - form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (:class:`str`): - Required. The id of the database to create and restore - to. This database must not already exist. The - ``database_id`` appended to ``parent`` forms the full - database name of the form - ``projects//instances//databases/``. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (:class:`str`): - Name of the backup from which to restore. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
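-        # Illustrative call shapes (assumed): restore_database(parent=...,
-        # database_id=..., backup=...) populates the request below, whereas
-        # combining `request=` with any flattened field raises ValueError.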
- flattened_params = [parent, database_id, backup] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest): - request = spanner_database_admin.RestoreDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database_id is not None: - request.database_id = database_id - if backup is not None: - request.backup = backup - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.restore_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.RestoreDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def list_database_operations(self, - request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabaseOperationsAsyncPager: - r"""Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_database_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]]): - The request object. The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - parent (:class:`str`): - Required. The instance of the database operations. - Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager: - The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): - request = spanner_database_admin.ListDatabaseOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
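-        # The unary RPC below returns only the first page of results; the
-        # pager constructed afterwards fetches any remaining pages lazily
-        # while the caller iterates with `async for`.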
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListDatabaseOperationsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_backup_operations(self,
-            request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListBackupOperationsAsyncPager:
-        r"""Lists the backup [long-running
-        operations][google.longrunning.Operation] in the given instance.
-        A backup operation has a name of the form
-        ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation>``.
-        The long-running operation
-        [metadata][google.longrunning.Operation.metadata] field type
-        ``metadata.type_url`` describes the type of the metadata.
-        Operations returned include those that have
-        completed/failed/canceled within the last 7 days, and pending
-        operations. Operations returned are ordered by
-        ``operation.metadata.value.progress.start_time`` in descending
-        order starting from the most recently started operation.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            async def sample_list_backup_operations():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_database_v1.ListBackupOperationsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_backup_operations(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]]):
-                The request object. The request for
-                [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
-            parent (:class:`str`):
-                Required. The instance of the backup operations. Values
-                are of the form
-                ``projects/<project>/instances/<instance>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager:
-                The response for
-                [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.ListBackupOperationsRequest): - request = backup.ListBackupOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupOperationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_database_roles(self, - request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabaseRolesAsyncPager: - r"""Lists Cloud Spanner database roles. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_database_roles(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseRolesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_roles(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]]): - The request object. The request for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - parent (:class:`str`): - Required. The database whose roles should be listed. 
- Values are of the form - ``projects//instances//databases/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager: - The response for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest): - request = spanner_database_admin.ListDatabaseRolesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_roles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatabaseRolesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def add_split_points(self, - request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None, - *, - database: Optional[str] = None, - split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.AddSplitPointsResponse: - r"""Adds split points to specified tables, indexes of a - database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_add_split_points(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.AddSplitPointsRequest( - database="database_value", - ) - - # Make the request - response = await client.add_split_points(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]]): - The request object. The request for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - database (:class:`str`): - Required. The database on whose tables/indexes split - points are to be added. Values are of the form - ``projects//instances//databases/``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - split_points (:class:`MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]`): - Required. The split points to add. - This corresponds to the ``split_points`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: - The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, split_points] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): - request = spanner_database_admin.AddSplitPointsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if split_points: - request.split_points.extend(split_points) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.add_split_points] - - # Certain fields should be provided within the metadata header; - # add these here. 
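-        # The routing key for AddSplitPoints is ``database``; to_grpc_metadata
-        # renders it as the standard ``x-goog-request-params`` entry (for
-        # example ``database=projects/.../databases/...``) used for routing.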
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - backup_schedule_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Creates a new backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_create_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateBackupScheduleRequest( - parent="parent_value", - backup_schedule_id="backup_schedule_id_value", - ) - - # Make the request - response = await client.create_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]]): - The request object. The request for - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. - parent (:class:`str`): - Required. The name of the database - that this backup schedule applies to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): - Required. The backup schedule to - create. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule_id (:class:`str`): - Required. The Id to use for the backup schedule. The - ``backup_schedule_id`` appended to ``parent`` forms the - full backup schedule name of the form - ``projects//instances//databases//backupSchedules/``. - - This corresponds to the ``backup_schedule_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
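-
-        A flattened-argument sketch (illustrative only; the parent name is
-        hypothetical and the empty ``BackupSchedule`` message is a placeholder):
-
-        .. code-block:: python
-
-            schedule = await client.create_backup_schedule(
-                parent="projects/my-project/instances/my-instance/databases/my-database",
-                backup_schedule=spanner_admin_database_v1.BackupSchedule(),
-                backup_schedule_id="my-schedule",
-            )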
- - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_schedule, backup_schedule_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest): - request = gsad_backup_schedule.CreateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if backup_schedule_id is not None: - request.backup_schedule_id = backup_schedule_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_backup_schedule(self, - request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup_schedule.BackupSchedule: - r"""Gets backup schedule for the input schedule name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_get_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]]): - The request object. The request for - [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. - name (:class:`str`): - Required. The name of the schedule to retrieve. 
Values - are of the form - ``projects//instances//databases//backupSchedules/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.GetBackupScheduleRequest): - request = backup_schedule.GetBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, - *, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = await client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]]): - The request object. The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): - Required. The backup schedule to update. - ``backup_schedule.name``, and the fields to be updated - as specified by ``update_mask`` are required. Other - fields are ignored. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which - fields in the BackupSchedule resource - should be updated. This mask is relative - to the BackupSchedule resource, not to - the request message. The field mask must - always be specified; this prevents any - future fields from being erased - accidentally. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup_schedule, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): - request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup_schedule.name", request.backup_schedule.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup_schedule(self, - request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_delete_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup_schedule(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]]): - The request object. The request for - [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. - name (:class:`str`): - Required. The name of the schedule to delete. Values are - of the form - ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one.
- if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest): - request = backup_schedule.DeleteBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_backup_schedules(self, - request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupSchedulesAsyncPager: - r"""Lists all the backup schedules for the database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_backup_schedules(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_backup_schedules(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]]): - The request object. The request for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - parent (:class:`str`): - Required. Database is the parent - resource whose backup schedules should - be listed. Values are of the form - projects/<project>/instances/<instance>/databases/<database> - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager: - The response for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - - Iterating over this object will yield results and - resolve additional pages automatically.
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.ListBackupSchedulesRequest): - request = backup_schedule.ListBackupSchedulesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_schedules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupSchedulesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def internal_update_graph_operation(self, - request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, - *, - database: Optional[str] = None, - operation_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: - r"""This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = await client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]]): - The request object. 
Internal request proto, do not use - directly. - database (:class:`str`): - Internal field, do not use directly. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - operation_id (:class:`str`): - Internal field, do not use directly. - This corresponds to the ``operation_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: - Internal response proto, do not use - directly. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, operation_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): - request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if operation_id is not None: - request.operation_id = operation_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.internal_update_graph_operation] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "DatabaseAdminAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "DatabaseAdminAsyncClient", -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py deleted file mode 100644 index 38e3050e48..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ /dev/null @@ -1,4387 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import uuid -import warnings - -from google.cloud.spanner_admin_database_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import common 
-from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DatabaseAdminGrpcTransport -from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport -from .transports.rest import DatabaseAdminRestTransport - - -class DatabaseAdminClientMeta(type): - """Metaclass for the DatabaseAdmin client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] - _transport_registry["grpc"] = DatabaseAdminGrpcTransport - _transport_registry["grpc_asyncio"] = DatabaseAdminGrpcAsyncIOTransport - _transport_registry["rest"] = DatabaseAdminRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DatabaseAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DatabaseAdminClient(metaclass=DatabaseAdminClientMeta): - """Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
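The conversion above is easiest to see with concrete endpoints. The following is an illustrative sketch only (the helper is an internal static method, shown here to document its behavior rather than as public API; the endpoint values are placeholders):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    convert = spanner_admin_database_v1.DatabaseAdminClient._get_default_mtls_endpoint

    # A plain *.googleapis.com endpoint gains an ".mtls" label ...
    assert convert("spanner.googleapis.com") == "spanner.mtls.googleapis.com"
    # ... and sandbox endpoints keep their "sandbox" label.
    assert convert("spanner.sandbox.googleapis.com") == "spanner.mtls.sandbox.googleapis.com"

    # Endpoints that are already mTLS, or that are not on googleapis.com,
    # are returned unchanged; so are empty values.
    assert convert("spanner.mtls.googleapis.com") == "spanner.mtls.googleapis.com"
    assert convert("example.com") == "example.com"
    assert convert(None) is None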
- DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DatabaseAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatabaseAdminTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def backup_path(project: str,instance: str,backup: str,) -> str: - """Returns a fully-qualified backup string.""" - return "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) - - @staticmethod - def parse_backup_path(path: str) -> Dict[str,str]: - """Parses a backup path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/backups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def backup_schedule_path(project: str,instance: str,database: str,schedule: str,) -> str: - """Returns a fully-qualified backup_schedule string.""" - return "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, ) - - @staticmethod - def parse_backup_schedule_path(path: str) -> Dict[str,str]: - """Parses a backup_schedule path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)/backupSchedules/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str: - """Returns a fully-qualified crypto_key string.""" - return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - - @staticmethod - def parse_crypto_key_path(path: str) -> Dict[str,str]: - """Parses a crypto_key path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: 
- - @staticmethod - def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: str,) -> str: - """Returns a fully-qualified crypto_key_version string.""" - return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) - - @staticmethod - def parse_crypto_key_version_path(path: str) -> Dict[str,str]: - """Parses a crypto_key_version path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)/cryptoKeyVersions/(?P<crypto_key_version>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def database_path(project: str,instance: str,database: str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def database_role_path(project: str,instance: str,database: str,role: str,) -> str: - """Returns a fully-qualified database_role string.""" - return "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, ) - - @staticmethod - def parse_database_role_path(path: str) -> Dict[str,str]: - """Parses a database_role path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)/databaseRoles/(?P<role>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_path(project: str,instance: str,) -> str: - """Returns a fully-qualified instance string.""" - return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - - @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: - """Parses an instance path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str: - """Returns a fully-qualified instance_partition string.""" - return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) - - @staticmethod - def parse_instance_partition_path(path: str) -> Dict[str,str]: - """Parses an instance_partition path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/instancePartitions/(?P<instance_partition>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, )
- - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse an organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. 
- universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DatabaseAdminClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info to. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance.
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the database admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DatabaseAdminTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DatabaseAdminClient._read_environment_variables() - self._client_cert_source = DatabaseAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DatabaseAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DatabaseAdminTransport) - if transport_provided: - # transport is a DatabaseAdminTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DatabaseAdminTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DatabaseAdminClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DatabaseAdminTransport], Callable[..., DatabaseAdminTransport]] = ( - DatabaseAdminClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DatabaseAdminTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner.admin.database_v1.DatabaseAdminClient`.", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "credentialsType": None, - } - ) - - def list_databases(self, - request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabasesPager: - r"""Lists Cloud Spanner databases. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_databases(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): - The request object. The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. 
- parent (str): - Required. The instance whose databases should be listed. - Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager: - The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.ListDatabasesRequest): - request = spanner_database_admin.ListDatabasesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDatabasesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response
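Because ``list_databases`` returns a ``ListDatabasesPager`` rather than the raw response, page tokens never need to be handled by hand. A minimal usage sketch (resource names are placeholders):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()
    parent = "projects/my-project/instances/my-instance"

    # Iterating the pager yields Database messages and fetches
    # additional pages transparently.
    for database in client.list_databases(parent=parent):
        print(database.name)

    # Whole pages can also be walked explicitly; each page is a
    # ListDatabasesResponse with a `databases` field.
    for page in client.list_databases(parent=parent).pages:
        print(len(page.databases))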
The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_create_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateDatabaseRequest( - parent="parent_value", - create_statement="create_statement_value", - ) - - # Make the request - operation = client.create_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]): - The request object. The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - parent (str): - Required. The name of the instance that will serve the - new database. Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - create_statement (str): - Required. A ``CREATE DATABASE`` statement, which - specifies the ID of the new database. The database ID - must conform to the regular expression - ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 - characters in length. If the database ID is a reserved - word or if it contains a hyphen, the database ID must be - enclosed in backticks (:literal:`\``). - - This corresponds to the ``create_statement`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
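-         #   Note: `request` and the flattened fields (`parent`,
-         #   `create_statement`) are mutually exclusive; for example,
-         #   create_database(request=req, parent=p) trips the quick check
-         #   below and raises ValueError.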
- flattened_params = [parent, create_statement] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): - request = spanner_database_admin.CreateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if create_statement is not None: - request.create_statement = create_statement - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. - return response - - def get_database(self, - request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.Database: - r"""Gets the state of a Cloud Spanner database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_get_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]): - The request object. The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - name (str): - Required. The name of the requested database. Values are - of the form - ``projects//instances//databases/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
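-                 For example (an illustrative sketch; the database path
-                 is a placeholder):
-
-                 .. code-block:: python
-
-                     database = client.get_database(
-                         name="projects/my-project/instances/my-instance/databases/my-db",
-                         timeout=30.0,
-                     )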
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Database: - A Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.GetDatabaseRequest): - request = spanner_database_admin.GetDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_database(self, - request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[spanner_database_admin.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a Cloud Spanner database. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the database. If the named database - does not exist, returns ``NOT_FOUND``. - - While the operation is pending: - - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field is set to true. - - Cancelling the operation is best-effort. If the cancellation - succeeds, the operation metadata's - [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] - is set, the updates are reverted, and the operation terminates - with a ``CANCELLED`` status. - - New UpdateDatabase requests will return a - ``FAILED_PRECONDITION`` error until the pending operation is - done (returns successfully or with error). - - Reading the database via the API continues to give the - pre-request values. - - Upon completion of the returned operation: - - - The new values are in effect and readable via the API. - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field becomes false. 
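-         For example, given a ``Database`` message whose ``name`` is already
-         set, drop protection can be toggled with a one-path field mask (an
-         illustrative sketch; only ``enable_drop_protection`` is currently
-         updatable):
-
-         .. code-block:: python
-
-             from google.protobuf import field_mask_pb2
-
-             database.enable_drop_protection = True
-             operation = client.update_database(
-                 database=database,
-                 update_mask=field_mask_pb2.FieldMask(paths=["enable_drop_protection"]),
-             )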
- - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format - ``projects//instances//databases//operations/`` - and can be used to track the database modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_update_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - database = spanner_admin_database_v1.Database() - database.name = "name_value" - - request = spanner_admin_database_v1.UpdateDatabaseRequest( - database=database, - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]): - The request object. The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - database (google.cloud.spanner_admin_database_v1.types.Database): - Required. The database to update. The ``name`` field of - the database is of the form - ``projects//instances//databases/``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to update. Currently, only - ``enable_drop_protection`` field can be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
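-         #   Routing note: this RPC is routed by the database resource, so
-         #   the metadata header built below is keyed on `database.name`
-         #   rather than on a top-level request field.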
- flattened_params = [database, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest): - request = spanner_database_admin.UpdateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.UpdateDatabaseMetadata, - ) - - # Done; return the response. - return response - - def update_database_ddl(self, - request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None, - *, - database: Optional[str] = None, - statements: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``/operations/`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): - The request object. Enqueues the given DDL statements to be applied, in - order but not necessarily all at once, to the database - schema at some point (or points) in the future. The - server checks that the statements are executable - (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - ``NULL`` value in a column to which ``NOT NULL`` would - be added). If a statement fails, all subsequent - statements in the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - database (str): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - statements (MutableSequence[str]): - Required. DDL statements to be - applied to the database. - - This corresponds to the ``statements`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
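-         #   Note: the DDL statements are enqueued in the order given, e.g.
-         #     ["CREATE TABLE Singers ( SingerId INT64 ) PRIMARY KEY (SingerId)",
-         #      "CREATE INDEX SingersById ON Singers(SingerId)"]
-         #   and a failed statement cancels the statements after it in the batch.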
- flattened_params = [database, statements] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): - request = spanner_database_admin.UpdateDatabaseDdlRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if statements is not None: - request.statements = statements - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, - ) - - # Done; return the response. - return response - - def drop_database(self, - request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_drop_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DropDatabaseRequest( - database="database_value", - ) - - # Make the request - client.drop_database(request=request) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - database (str): - Required. The database to be dropped. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
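-                 For example, a custom retry policy can be supplied (an
-                 illustrative sketch; the database path is a placeholder):
-
-                 .. code-block:: python
-
-                     from google.api_core import retry as retries
-
-                     client.drop_database(
-                         database="projects/my-project/instances/my-instance/databases/my-db",
-                         retry=retries.Retry(initial=1.0, maximum=10.0),
-                     )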
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.DropDatabaseRequest): - request = spanner_database_admin.DropDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.drop_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_database_ddl(self, - request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - r"""Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_get_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseDdlRequest( - database="database_value", - ) - - # Make the request - response = client.get_database_ddl(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - database (str): - Required. The database whose schema we wish to get. 
- Values are of the form - ``projects//instances//databases/`` - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): - request = spanner_database_admin.GetDatabaseDdlRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
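-                 For example, a binding can be constructed directly with
-                 the protobuf types (an illustrative sketch; the role and
-                 member are placeholders):
-
-                 .. code-block:: python
-
-                     from google.iam.v1 import iam_policy_pb2, policy_pb2
-
-                     policy = policy_pb2.Policy(bindings=[
-                         policy_pb2.Binding(
-                             role="roles/spanner.databaseReader",
-                             members=["user:mike@example.com"],
-                         ),
-                     ])
-                     request = iam_policy_pb2.SetIamPolicyRequest(
-                         resource="projects/my-project/instances/my-instance/databases/my-db",
-                         policy=policy,
-                     )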
-
-                 **JSON example:**
-
-                 ::
-
-                     {
-                       "bindings": [
-                         {
-                           "role": "roles/resourcemanager.organizationAdmin",
-                           "members": [
-                             "user:mike@example.com",
-                             "group:admins@example.com",
-                             "domain:google.com",
-                             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                           ]
-                         },
-                         {
-                           "role": "roles/resourcemanager.organizationViewer",
-                           "members": [
-                             "user:eve@example.com"
-                           ],
-                           "condition": {
-                             "title": "expirable access",
-                             "description": "Does not grant access after Sep 2020",
-                             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')",
-                           }
-                         }
-                       ],
-                       "etag": "BwWWja0YfJA=",
-                       "version": 3
-                     }
-
-                 **YAML example:**
-
-                 ::
-
-                     bindings:
-                     - members:
-                       - user:mike@example.com
-                       - group:admins@example.com
-                       - domain:google.com
-                       - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                       role: roles/resourcemanager.organizationAdmin
-                     - members:
-                       - user:eve@example.com
-                       role: roles/resourcemanager.organizationViewer
-                       condition:
-                         title: expirable access
-                         description: Does not grant access after Sep 2020
-                         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-                     etag: BwWWja0YfJA=
-                     version: 3
-
-                 For a description of IAM and its features, see the
-                 [IAM documentation](https://cloud.google.com/iam/docs/).
-
-         """
-         # Create or coerce a protobuf request object.
-         # - Quick check: If we got a request object, we should *not* have
-         #   gotten any keyword arguments that map to the request.
-         flattened_params = [resource]
-         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-         if request is not None and has_flattened_params:
-             raise ValueError('If the `request` argument is set, then none of '
-                              'the individual field arguments should be set.')
-
-         if isinstance(request, dict):
-             # - The request isn't a proto-plus wrapped type,
-             #   so it must be constructed via keyword expansion.
-             request = iam_policy_pb2.SetIamPolicyRequest(**request)
-         elif not request:
-             # Null request, just make one.
-             request = iam_policy_pb2.SetIamPolicyRequest()
-             if resource is not None:
-                 request.resource = resource
-
-         # Wrap the RPC method; this adds retry and timeout information,
-         # and friendly error handling.
-         rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
-
-         # Certain fields should be provided within the metadata header;
-         # add these here.
-         metadata = tuple(metadata) + (
-             gapic_v1.routing_header.to_grpc_metadata((
-                 ("resource", request.resource),
-             )),
-         )
-
-         # Validate the universe domain.
-         self._validate_universe_domain()
-
-         # Send the request.
-         response = rpc(
-             request,
-             retry=retry,
-             timeout=timeout,
-             metadata=metadata,
-         )
-
-         # Done; return the response.
-         return response
-
-     def get_iam_policy(self,
-             request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
-             *,
-             resource: Optional[str] = None,
-             retry: OptionalRetry = gapic_v1.method.DEFAULT,
-             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-             metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-             ) -> policy_pb2.Policy:
-         r"""Gets the access control policy for a database or backup
-         resource. Returns an empty policy if a database or backup exists
-         but does not have a policy set.
-
-         Authorization requires ``spanner.databases.getIamPolicy``
-         permission on
-         [resource][google.iam.v1.GetIamPolicyRequest.resource]. For
-         backups, authorization requires ``spanner.backups.getIamPolicy``
-         permission on
-         [resource][google.iam.v1.GetIamPolicyRequest.resource].
-
-         .. code-block:: python
-
-             # This snippet has been automatically generated and should be regarded as a
-             # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
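-                 For example, conditional role bindings are only returned
-                 when policy version 3 is requested explicitly (an
-                 illustrative sketch; the resource path is a placeholder):
-
-                 .. code-block:: python
-
-                     from google.iam.v1 import iam_policy_pb2, options_pb2
-
-                     request = iam_policy_pb2.GetIamPolicyRequest(
-                         resource="projects/my-project/instances/my-instance/databases/my-db",
-                         options=options_pb2.GetPolicyOptions(requested_policy_version=3),
-                     )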
-
-                 **JSON example:**
-
-                 ::
-
-                     {
-                       "bindings": [
-                         {
-                           "role": "roles/resourcemanager.organizationAdmin",
-                           "members": [
-                             "user:mike@example.com",
-                             "group:admins@example.com",
-                             "domain:google.com",
-                             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                           ]
-                         },
-                         {
-                           "role": "roles/resourcemanager.organizationViewer",
-                           "members": [
-                             "user:eve@example.com"
-                           ],
-                           "condition": {
-                             "title": "expirable access",
-                             "description": "Does not grant access after Sep 2020",
-                             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')",
-                           }
-                         }
-                       ],
-                       "etag": "BwWWja0YfJA=",
-                       "version": 3
-                     }
-
-                 **YAML example:**
-
-                 ::
-
-                     bindings:
-                     - members:
-                       - user:mike@example.com
-                       - group:admins@example.com
-                       - domain:google.com
-                       - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                       role: roles/resourcemanager.organizationAdmin
-                     - members:
-                       - user:eve@example.com
-                       role: roles/resourcemanager.organizationViewer
-                       condition:
-                         title: expirable access
-                         description: Does not grant access after Sep 2020
-                         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-                     etag: BwWWja0YfJA=
-                     version: 3
-
-                 For a description of IAM and its features, see the
-                 [IAM documentation](https://cloud.google.com/iam/docs/).
-
-         """
-         # Create or coerce a protobuf request object.
-         # - Quick check: If we got a request object, we should *not* have
-         #   gotten any keyword arguments that map to the request.
-         flattened_params = [resource]
-         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-         if request is not None and has_flattened_params:
-             raise ValueError('If the `request` argument is set, then none of '
-                              'the individual field arguments should be set.')
-
-         if isinstance(request, dict):
-             # - The request isn't a proto-plus wrapped type,
-             #   so it must be constructed via keyword expansion.
-             request = iam_policy_pb2.GetIamPolicyRequest(**request)
-         elif not request:
-             # Null request, just make one.
-             request = iam_policy_pb2.GetIamPolicyRequest()
-             if resource is not None:
-                 request.resource = resource
-
-         # Wrap the RPC method; this adds retry and timeout information,
-         # and friendly error handling.
-         rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
-
-         # Certain fields should be provided within the metadata header;
-         # add these here.
-         metadata = tuple(metadata) + (
-             gapic_v1.routing_header.to_grpc_metadata((
-                 ("resource", request.resource),
-             )),
-         )
-
-         # Validate the universe domain.
-         self._validate_universe_domain()
-
-         # Send the request.
-         response = rpc(
-             request,
-             retry=retry,
-             timeout=timeout,
-             metadata=metadata,
-         )
-
-         # Done; return the response.
-         return response
-
-     def test_iam_permissions(self,
-             request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
-             *,
-             resource: Optional[str] = None,
-             permissions: Optional[MutableSequence[str]] = None,
-             retry: OptionalRetry = gapic_v1.method.DEFAULT,
-             timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-             metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-             ) -> iam_policy_pb2.TestIamPermissionsResponse:
-         r"""Returns permissions that the caller has on the specified
-         database or backup resource.
-
-         Attempting this RPC on a non-existent Cloud Spanner database
-         will result in a NOT_FOUND error if the user has
-         ``spanner.databases.list`` permission on the containing Cloud
-         Spanner instance. Otherwise returns an empty set of permissions.
- Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - resource (str): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (MutableSequence[str]): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource, permissions] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - if resource is not None: - request.resource = resource - if permissions: - request.permissions.extend(permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_backup(self, - request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup: Optional[gsad_backup.Backup] = None, - backup_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_create_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - ) - - # Make the request - operation = client.create_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - parent (str): - Required. The name of the instance in which the backup - will be created. This must be the same instance that - contains the database the backup will be created from. - The backup will be stored in the location(s) specified - in the instance configuration of this instance. Values - are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to create. 
- This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (str): - Required. The id of the backup to be created. The - ``backup_id`` appended to ``parent`` forms the full - backup name of the form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup, backup_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup.CreateBackupRequest): - request = gsad_backup.CreateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup is not None: - request.backup = backup - if backup_id is not None: - request.backup_id = backup_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gsad_backup.Backup, - metadata_type=gsad_backup.CreateBackupMetadata, - ) - - # Done; return the response. - return response - - def copy_backup(self, - request: Optional[Union[backup.CopyBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_id: Optional[str] = None, - source_backup: Optional[str] = None, - expire_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Starts copying a Cloud Spanner Backup. 
The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track copying of the backup. The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_copy_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CopyBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - ) - - # Make the request - operation = client.copy_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]): - The request object. The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - parent (str): - Required. The name of the destination instance that will - contain the backup copy. Values are of the form: - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (str): - Required. The id of the backup copy. The ``backup_id`` - appended to ``parent`` forms the full backup_uri of the - form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source_backup (str): - Required. The source backup to be copied. The source - backup needs to be in READY state for it to be copied. - Once CopyBackup is in progress, the source backup cannot - be deleted or cleaned up on expiration until CopyBackup - is finished. Values are of the form: - ``projects//instances//backups/``. - - This corresponds to the ``source_backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The expiration time of the backup in - microsecond granularity. The expiration time must be at - least 6 hours and at most 366 days from the - ``create_time`` of the source backup. Once the - ``expire_time`` has passed, the backup is eligible to be - automatically deleted by Cloud Spanner to free the - resources used by the backup. - - This corresponds to the ``expire_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
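-                 For example (an illustrative sketch using a 7-day
-                 expiration):
-
-                 .. code-block:: python
-
-                     import datetime
-
-                     from google.protobuf import timestamp_pb2
-
-                     expire_time = timestamp_pb2.Timestamp()
-                     expire_time.FromDatetime(
-                         datetime.datetime.now(datetime.timezone.utc)
-                         + datetime.timedelta(days=7)
-                     )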
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_id, source_backup, expire_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.CopyBackupRequest): - request = backup.CopyBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_id is not None: - request.backup_id = backup_id - if source_backup is not None: - request.source_backup = source_backup - if expire_time is not None: - request.expire_time = expire_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.copy_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backup.Backup, - metadata_type=backup.CopyBackupMetadata, - ) - - # Done; return the response. - return response - - def get_backup(self, - request: Optional[Union[backup.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup.Backup: - r"""Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_get_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]): - The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - name (str): - Required. Name of the backup. Values are of the form - ``projects/<project>/instances/<instance>/backups/<backup>``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.GetBackupRequest): - request = backup.GetBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_backup(self, - request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None, - *, - backup: Optional[gsad_backup.Backup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup.Backup: - r"""Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_update_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupRequest( - ) - - # Make the request - response = client.update_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): - The request object. The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only - supported for the following fields: - - - ``backup.expire_time``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be - updated. This mask is relative to the Backup resource, - not to the request message. The field mask must always - be specified; this prevents any future fields from being - erased accidentally by clients that do not know about - them. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup.UpdateBackupRequest): - request = gsad_backup.UpdateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these.
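# The update_backup docstring above notes that only ``backup.expire_time`` may
# be updated and that the field mask is mandatory. A minimal flattened-call
# sketch, with a hypothetical backup name:

import datetime

from google.cloud import spanner_admin_database_v1
from google.protobuf import field_mask_pb2, timestamp_pb2

def extend_backup_expiry():
    client = spanner_admin_database_v1.DatabaseAdminClient()
    new_expiry = timestamp_pb2.Timestamp()
    new_expiry.FromDatetime(
        datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=30)
    )
    backup = spanner_admin_database_v1.Backup(
        name="projects/my-project/instances/my-instance/backups/my-backup",
        expire_time=new_expiry,
    )
    # Only the masked field is applied; other Backup fields are ignored.
    return client.update_backup(
        backup=backup,
        update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
    )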
- if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup(self, - request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_delete_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - client.delete_backup(request=request) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - name (str): - Required. Name of the backup to delete. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
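# delete_backup returns None on success. A small sketch that treats an
# already-missing backup as success; the full resource name is supplied by
# the caller:

from google.api_core import exceptions
from google.cloud import spanner_admin_database_v1

def delete_backup_if_present(name: str) -> bool:
    """Delete a backup; return False if it was already gone."""
    client = spanner_admin_database_v1.DatabaseAdminClient()
    try:
        client.delete_backup(name=name)
        return True
    except exceptions.NotFound:
        return False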
- if not isinstance(request, backup.DeleteBackupRequest): - request = backup.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_backups(self, - request: Optional[Union[backup.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupsPager: - r"""Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - parent (str): - Required. The instance to list backups from. Values are - of the form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
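# The ListBackupsPager resolves page tokens transparently, and the request
# type accepts a ``filter`` expression. A sketch with a hypothetical instance
# name; the filter syntax follows the service's documentation:

from google.cloud import spanner_admin_database_v1

def backup_names(instance_name: str):
    client = spanner_admin_database_v1.DatabaseAdminClient()
    request = spanner_admin_database_v1.ListBackupsRequest(
        parent=instance_name,  # e.g. "projects/my-project/instances/my-instance"
        filter='expire_time < "2030-01-01T00:00:00Z"',
    )
    # Iteration yields Backup messages across pages, newest create_time first.
    return [b.name for b in client.list_backups(request=request)]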
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.ListBackupsRequest): - request = backup.ListBackupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def restore_database(self, - request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database_id: Optional[str] = None, - backup: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_restore_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.RestoreDatabaseRequest( - backup="backup_value", - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]): - The request object. The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - parent (str): - Required. The name of the instance in which to create - the restored database. This instance must be in the same - project and have the same instance configuration as the - instance containing the source backup. Values are of the - form ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (str): - Required. The id of the database to create and restore - to. This database must not already exist. The - ``database_id`` appended to ``parent`` forms the full - database name of the form - ``projects/<project>/instances/<instance>/databases/<database_id>``. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (str): - Name of the backup from which to restore. Values are of - the form - ``projects/<project>/instances/<instance>/backups/<backup>``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, database_id, backup] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest): - request = spanner_database_admin.RestoreDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these.
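# A flattened-call sketch of restore_database; the names are hypothetical.
# The returned future exposes RestoreDatabaseMetadata while the operation
# is in flight:

from google.cloud import spanner_admin_database_v1

def restore_from_backup():
    client = spanner_admin_database_v1.DatabaseAdminClient()
    operation = client.restore_database(
        parent="projects/my-project/instances/my-instance",
        database_id="restored-db",  # must not already exist
        backup="projects/my-project/instances/my-instance/backups/my-backup",
    )
    print("Restoring:", operation.metadata.name)
    return operation.result()  # a Database message on success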
- if parent is not None: - request.parent = parent - if database_id is not None: - request.database_id = database_id - if backup is not None: - request.backup = backup - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restore_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.RestoreDatabaseMetadata, - ) - - # Done; return the response. - return response - - def list_database_operations(self, - request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabaseOperationsPager: - r"""Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_database_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]): - The request object. The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - parent (str): - Required. The instance of the database operations. - Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager: - The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): - request = spanner_database_admin.ListDatabaseOperationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_database_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDatabaseOperationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_backup_operations(self, - request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupOperationsPager: - r"""Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_backup_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]): - The request object. The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - parent (str): - Required. The instance of the backup operations. Values - are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager: - The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.ListBackupOperationsRequest): - request = backup.ListBackupOperationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backup_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
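# Both operation-listing RPCs return pagers over google.longrunning Operation
# messages, and both requests accept a ``filter``. A sketch that walks the
# pager page by page; the instance name is hypothetical and the filter
# follows the documented operation-filter syntax:

from google.cloud import spanner_admin_database_v1

def running_backup_operations(instance_name: str):
    client = spanner_admin_database_v1.DatabaseAdminClient()
    request = spanner_admin_database_v1.ListBackupOperationsRequest(
        parent=instance_name,
        filter="(done:false)",  # only operations still in flight
    )
    ops = []
    for page in client.list_backup_operations(request=request).pages:
        ops.extend(page.operations)
    return ops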
- response = pagers.ListBackupOperationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_database_roles(self, - request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabaseRolesPager: - r"""Lists Cloud Spanner database roles. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_database_roles(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseRolesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_roles(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]): - The request object. The request for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - parent (str): - Required. The database whose roles should be listed. - Values are of the form - ``projects//instances//databases/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager: - The response for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
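# Every method here accepts retry/timeout overrides in place of the
# gapic_v1.method.DEFAULT sentinels. A sketch of an explicit policy for
# list_database_roles, under the assumption that UNAVAILABLE is safe to
# retry for this read-only call:

from google.api_core import exceptions
from google.api_core import retry as retries
from google.cloud import spanner_admin_database_v1

def list_roles_with_retry(database_name: str):
    client = spanner_admin_database_v1.DatabaseAdminClient()
    policy = retries.Retry(
        initial=0.25,   # seconds before the first retry
        maximum=8.0,    # cap on exponential backoff
        multiplier=2.0,
        timeout=60.0,   # overall retry budget, seconds
        predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
    )
    pager = client.list_database_roles(
        parent=database_name, retry=policy, timeout=10.0
    )
    return [role.name for role in pager]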
- if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest): - request = spanner_database_admin.ListDatabaseRolesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_database_roles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDatabaseRolesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def add_split_points(self, - request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None, - *, - database: Optional[str] = None, - split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.AddSplitPointsResponse: - r"""Adds split points to specified tables, indexes of a - database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_add_split_points(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.AddSplitPointsRequest( - database="database_value", - ) - - # Make the request - response = client.add_split_points(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]): - The request object. The request for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - database (str): - Required. The database on whose tables/indexes split - points are to be added. Values are of the form - ``projects//instances//databases/``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]): - Required. The split points to add. - This corresponds to the ``split_points`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
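For ``split_points``, plain dicts are coerced into ``SplitPoints`` messages by proto-plus. A sketch with a hypothetical table and key; the subfield layout should be checked against the published types:

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    def presplit_table(database_name: str):
        client = spanner_admin_database_v1.DatabaseAdminClient()
        return client.add_split_points(
            database=database_name,
            split_points=[
                {
                    "table": "Singers",
                    "keys": [
                        {"key_parts": {"values": [{"string_value": "c42"}]}}
                    ],
                }
            ],
        )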
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: - The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, split_points] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): - request = spanner_database_admin.AddSplitPointsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if split_points is not None: - request.split_points = split_points - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.add_split_points] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - backup_schedule_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Creates a new backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_create_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateBackupScheduleRequest( - parent="parent_value", - backup_schedule_id="backup_schedule_id_value", - ) - - # Make the request - response = client.create_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]): - The request object. The request for - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. - parent (str): - Required. The name of the database - that this backup schedule applies to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): - Required. The backup schedule to - create. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule_id (str): - Required. The Id to use for the backup schedule. The - ``backup_schedule_id`` appended to ``parent`` forms the - full backup schedule name of the form - ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``. - - This corresponds to the ``backup_schedule_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_schedule, backup_schedule_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest): - request = gsad_backup_schedule.CreateBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these.
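# A sketch of a daily full-backup schedule. The BackupSchedule subtypes
# (BackupScheduleSpec, CrontabSpec, FullBackupSpec) are the published ones;
# the schedule id, cron text, and retention are hypothetical:

from google.cloud import spanner_admin_database_v1
from google.protobuf import duration_pb2

def create_daily_schedule(database_name: str):
    client = spanner_admin_database_v1.DatabaseAdminClient()
    schedule = spanner_admin_database_v1.BackupSchedule(
        spec=spanner_admin_database_v1.BackupScheduleSpec(
            cron_spec=spanner_admin_database_v1.CrontabSpec(text="0 2 * * *"),
        ),
        retention_duration=duration_pb2.Duration(seconds=7 * 24 * 3600),
        full_backup_spec=spanner_admin_database_v1.FullBackupSpec(),
    )
    return client.create_backup_schedule(
        parent=database_name,
        backup_schedule=schedule,
        backup_schedule_id="daily-full-backup",
    )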
- if parent is not None: - request.parent = parent - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if backup_schedule_id is not None: - request.backup_schedule_id = backup_schedule_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_backup_schedule(self, - request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup_schedule.BackupSchedule: - r"""Gets backup schedule for the input schedule name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_get_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]): - The request object. The request for - [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. - name (str): - Required. The name of the schedule to retrieve. Values - are of the form - ``projects//instances//databases//backupSchedules/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
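# The routing-header step repeated in each method body resolves to a single
# gRPC metadata entry. Roughly, and with a hypothetical resource name:
#
#     from google.api_core import gapic_v1
#
#     md = gapic_v1.routing_header.to_grpc_metadata(
#         (("parent", "projects/p/instances/i/databases/d"),)
#     )
#     # md is approximately:
#     # ("x-goog-request-params",
#     #  "parent=projects%2Fp%2Finstances%2Fi%2Fdatabases%2Fd")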
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.GetBackupScheduleRequest): - request = backup_schedule.GetBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, - *, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]): - The request object. The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): - Required. The backup schedule to update. - ``backup_schedule.name``, and the fields to be updated - as specified by ``update_mask`` are required. Other - fields are ignored. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which - fields in the BackupSchedule resource - should be updated. This mask is relative - to the BackupSchedule resource, not to - the request message. 
The field mask must - always be specified; this prevents any - future fields from being erased - accidentally. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup_schedule, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): - request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup_schedule.name", request.backup_schedule.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup_schedule(self, - request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_delete_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - client.delete_backup_schedule(request=request) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]): - The request object. The request for - [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. - name (str): - Required. The name of the schedule to delete. Values are - of the form - ``projects//instances//databases//backupSchedules/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest): - request = backup_schedule.DeleteBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_backup_schedules(self, - request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupSchedulesPager: - r"""Lists all the backup schedules for the database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_backup_schedules(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_schedules(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]): - The request object. The request for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - parent (str): - Required. Database is the parent - resource whose backup schedules should - be listed. Values are of the form - projects//instances//databases/ - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager: - The response for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.ListBackupSchedulesRequest): - request = backup_schedule.ListBackupSchedulesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
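        # For illustration only (values hypothetical): the routing header
        # appended to ``metadata`` above is sent with this call as the gRPC
        # metadata entry
        #   ("x-goog-request-params",
        #    "parent=projects%2Fmy-project%2Finstances%2Fmy-instance%2Fdatabases%2Fmy-db"),
        # i.e. the field path with a URL-encoded resource name, which the
        # Spanner frontend uses to route the request.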
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupSchedulesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def internal_update_graph_operation(self, - request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, - *, - database: Optional[str] = None, - operation_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: - r"""This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]): - The request object. Internal request proto, do not use - directly. - database (str): - Internal field, do not use directly. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - operation_id (str): - Internal field, do not use directly. - This corresponds to the ``operation_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: - Internal response proto, do not use - directly. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
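        # For illustration only (client handle and values hypothetical), the
        # two calling conventions are mutually exclusive:
        #   client.internal_update_graph_operation(request=request)      # OK
        #   client.internal_update_graph_operation(database="db")        # OK
        #   client.internal_update_graph_operation(request=request,
        #                                          database="db")        # raises ValueError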
- flattened_params = [database, operation_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): - request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if operation_id is not None: - request.operation_id = operation_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.internal_update_graph_operation] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DatabaseAdminClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
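        # Point of reference, not generated code: ``_wrapped_methods`` is
        # populated by the transport's ``_prep_wrapped_methods`` (shown in
        # ``transports/base.py`` later in this patch), which binds each RPC to
        # its default retry and timeout policy when the transport is built.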
- rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "DatabaseAdminClient", -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py deleted file mode 100644 index 5a5b92e7d1..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py +++ /dev/null @@ -1,864 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.longrunning import operations_pb2 # type: ignore - - -class ListDatabasesPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabasesResponse], - request: spanner_database_admin.ListDatabasesRequest, - response: spanner_database_admin.ListDatabasesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
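
        A minimal usage sketch (the ``client`` handle and resource path are
        hypothetical); callers normally obtain this pager from
        ``DatabaseAdminClient.list_databases`` rather than constructing it
        directly:

        .. code-block:: python

            pager = client.list_databases(
                parent="projects/my-project/instances/my-instance")
            for database in pager:  # additional pages are fetched lazily
                print(database.name)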
- """ - self._method = method - self._request = spanner_database_admin.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_database_admin.Database]: - for page in self.pages: - yield from page.databases - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabasesAsyncPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabasesResponse]], - request: spanner_database_admin.ListDatabasesRequest, - response: spanner_database_admin.ListDatabasesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: - async def async_generator(): - async for page in self.pages: - for response in page.databases: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backup.ListBackupsResponse], - request: backup.ListBackupsRequest, - response: backup.ListBackupsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backup.Backup]: - for page in self.pages: - yield from page.backups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsAsyncPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup.ListBackupsResponse]], - request: backup.ListBackupsRequest, - response: backup.ListBackupsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backup.Backup]: - async def async_generator(): - async for page in self.pages: - for response in page.backups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseOperationsPager: - """A pager for iterating through ``list_database_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabaseOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabaseOperationsResponse], - request: spanner_database_admin.ListDatabaseOperationsRequest, - response: spanner_database_admin.ListDatabaseOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseOperationsAsyncPager: - """A pager for iterating through ``list_database_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabaseOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]], - request: spanner_database_admin.ListDatabaseOperationsRequest, - response: spanner_database_admin.ListDatabaseOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupOperationsPager: - """A pager for iterating through ``list_backup_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backup.ListBackupOperationsResponse], - request: backup.ListBackupOperationsRequest, - response: backup.ListBackupOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupOperationsAsyncPager: - """A pager for iterating through ``list_backup_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup.ListBackupOperationsResponse]], - request: backup.ListBackupOperationsRequest, - response: backup.ListBackupOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseRolesPager: - """A pager for iterating through ``list_database_roles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``database_roles`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabaseRoles`` requests and continue to iterate - through the ``database_roles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabaseRolesResponse], - request: spanner_database_admin.ListDatabaseRolesRequest, - response: spanner_database_admin.ListDatabaseRolesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseRolesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabaseRolesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_database_admin.DatabaseRole]: - for page in self.pages: - yield from page.database_roles - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseRolesAsyncPager: - """A pager for iterating through ``list_database_roles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``database_roles`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabaseRoles`` requests and continue to iterate - through the ``database_roles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseRolesResponse]], - request: spanner_database_admin.ListDatabaseRolesRequest, - response: spanner_database_admin.ListDatabaseRolesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseRolesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseRolesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_database_admin.DatabaseRole]: - async def async_generator(): - async for page in self.pages: - for response in page.database_roles: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupSchedulesPager: - """A pager for iterating through ``list_backup_schedules`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backup_schedules`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupSchedules`` requests and continue to iterate - through the ``backup_schedules`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backup_schedule.ListBackupSchedulesResponse], - request: backup_schedule.ListBackupSchedulesRequest, - response: backup_schedule.ListBackupSchedulesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup_schedule.ListBackupSchedulesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup_schedule.ListBackupSchedulesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backup_schedule.BackupSchedule]: - for page in self.pages: - yield from page.backup_schedules - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupSchedulesAsyncPager: - """A pager for iterating through ``list_backup_schedules`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backup_schedules`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupSchedules`` requests and continue to iterate - through the ``backup_schedules`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup_schedule.ListBackupSchedulesResponse]], - request: backup_schedule.ListBackupSchedulesRequest, - response: backup_schedule.ListBackupSchedulesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup_schedule.ListBackupSchedulesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup_schedule.ListBackupSchedulesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backup_schedule.BackupSchedule]: - async def async_generator(): - async for page in self.pages: - for response in page.backup_schedules: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst deleted file mode 100644 index f70c023a98..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DatabaseAdminTransport` is the ABC for all transports. -- public child `DatabaseAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DatabaseAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDatabaseAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DatabaseAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py deleted file mode 100644 index 975834b54d..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DatabaseAdminTransport -from .grpc import DatabaseAdminGrpcTransport -from .grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport -from .rest import DatabaseAdminRestTransport -from .rest import DatabaseAdminRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] -_transport_registry['grpc'] = DatabaseAdminGrpcTransport -_transport_registry['grpc_asyncio'] = DatabaseAdminGrpcAsyncIOTransport -_transport_registry['rest'] = DatabaseAdminRestTransport - -__all__ = ( - 'DatabaseAdminTransport', - 'DatabaseAdminGrpcTransport', - 'DatabaseAdminGrpcAsyncIOTransport', - 'DatabaseAdminRestTransport', - 'DatabaseAdminRestInterceptor', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py deleted file mode 100644 index 7d801501d3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ /dev/null @@ -1,795 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.spanner_admin_database_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DatabaseAdminTransport(abc.ABC): - """Abstract transport class for DatabaseAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. 
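        # For context, not generated code: when ``scopes`` is None, the
        # google.auth calls below receive default_scopes=self.AUTH_SCOPES, so
        # credentials resolved from the environment fall back to the
        # cloud-platform and spanner.admin scopes declared above.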
- self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply the audience if a credentials file was passed by the user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_databases: gapic_v1.method.wrap_method( - self.list_databases, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_database: gapic_v1.method.wrap_method( - self.create_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database: gapic_v1.method.wrap_method( - self.get_database, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database: gapic_v1.method.wrap_method( - self.update_database, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database_ddl: gapic_v1.method.wrap_method( - self.update_database_ddl, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.drop_database: gapic_v1.method.wrap_method( - self.drop_database, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database_ddl: gapic_v1.method.wrap_method( - self.get_database_ddl, -
default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.create_backup: gapic_v1.method.wrap_method( - self.create_backup, - default_timeout=3600.0, - client_info=client_info, - ), - self.copy_backup: gapic_v1.method.wrap_method( - self.copy_backup, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup: gapic_v1.method.wrap_method( - self.get_backup, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup: gapic_v1.method.wrap_method( - self.update_backup, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup: gapic_v1.method.wrap_method( - self.delete_backup, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backups: gapic_v1.method.wrap_method( - self.list_backups, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.restore_database: gapic_v1.method.wrap_method( - self.restore_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_operations: gapic_v1.method.wrap_method( - self.list_database_operations, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_operations: gapic_v1.method.wrap_method( - self.list_backup_operations, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_roles: gapic_v1.method.wrap_method( - self.list_database_roles, - 
default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.add_split_points: gapic_v1.method.wrap_method( - self.add_split_points, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_backup_schedule: gapic_v1.method.wrap_method( - self.create_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup_schedule: gapic_v1.method.wrap_method( - self.get_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup_schedule: gapic_v1.method.wrap_method( - self.update_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup_schedule: gapic_v1.method.wrap_method( - self.delete_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_schedules: gapic_v1.method.wrap_method( - self.list_backup_schedules, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.internal_update_graph_operation: gapic_v1.method.wrap_method( - self.internal_update_graph_operation, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - Union[ - spanner_database_admin.ListDatabasesResponse, - Awaitable[spanner_database_admin.ListDatabasesResponse] - ]]: - raise NotImplementedError() - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - Union[ - spanner_database_admin.Database, - Awaitable[spanner_database_admin.Database] - ]]: - raise NotImplementedError() - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - Union[ - spanner_database_admin.GetDatabaseDdlResponse, - Awaitable[spanner_database_admin.GetDatabaseDdlResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - Union[ - backup.Backup, - Awaitable[backup.Backup] - ]]: - raise NotImplementedError() - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - Union[ - gsad_backup.Backup, - Awaitable[gsad_backup.Backup] - ]]: - raise NotImplementedError() - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - Union[ - backup.ListBackupsResponse, - Awaitable[backup.ListBackupsResponse] - ]]: - raise NotImplementedError() - - @property - def 
restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - Union[ - spanner_database_admin.ListDatabaseOperationsResponse, - Awaitable[spanner_database_admin.ListDatabaseOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - Union[ - backup.ListBackupOperationsResponse, - Awaitable[backup.ListBackupOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - Union[ - spanner_database_admin.ListDatabaseRolesResponse, - Awaitable[spanner_database_admin.ListDatabaseRolesResponse] - ]]: - raise NotImplementedError() - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - Union[ - spanner_database_admin.AddSplitPointsResponse, - Awaitable[spanner_database_admin.AddSplitPointsResponse] - ]]: - raise NotImplementedError() - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - Union[ - gsad_backup_schedule.BackupSchedule, - Awaitable[gsad_backup_schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - Union[ - backup_schedule.BackupSchedule, - Awaitable[backup_schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - Union[ - gsad_backup_schedule.BackupSchedule, - Awaitable[gsad_backup_schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - Union[ - backup_schedule.ListBackupSchedulesResponse, - Awaitable[backup_schedule.ListBackupSchedulesResponse] - ]]: - raise NotImplementedError() - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - Union[ - spanner_database_admin.InternalUpdateGraphOperationResponse, - Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise 
NotImplementedError() - - -__all__ = ( - 'DatabaseAdminTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py deleted file mode 100644 index 8f548acc7c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ /dev/null @@ -1,1285 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": 
dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata to a dict of str keys and values. - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DatabaseAdminGrpcTransport(DatabaseAdminTransport): - """gRPC backend transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library.
- scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
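- # Hedged summary of the TLS precedence implemented here: an explicit - # ``channel`` bypasses this block entirely; the deprecated - # ``api_mtls_endpoint``/``client_cert_source`` pair builds mTLS credentials - # (falling back to application default SSL credentials); otherwise - # ``client_cert_source_for_mtls`` is honored below unless - # ``ssl_channel_credentials`` was supplied. A sketch with hypothetical - # certificate bytes: - # - # transport = DatabaseAdminGrpcTransport( - # client_cert_source_for_mtls=lambda: (cert_bytes, key_bytes), - # )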
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - spanner_database_admin.ListDatabasesResponse]: - r"""Return a callable for the list databases method over gRPC. - - Lists Cloud Spanner databases. - - Returns: - Callable[[~.ListDatabasesRequest], - ~.ListDatabasesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', - request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the create database method over gRPC. - - Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.CreateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', - request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - spanner_database_admin.Database]: - r"""Return a callable for the get database method over gRPC. - - Gets the state of a Cloud Spanner database. - - Returns: - Callable[[~.GetDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', - request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, - response_deserializer=spanner_database_admin.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the update database method over gRPC. - - Updates a Cloud Spanner database. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the database. If the named database - does not exist, returns ``NOT_FOUND``. - - While the operation is pending: - - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field is set to true. - - Cancelling the operation is best-effort. If the cancellation - succeeds, the operation metadata's - [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] - is set, the updates are reverted, and the operation terminates - with a ``CANCELLED`` status. - - New UpdateDatabase requests will return a - ``FAILED_PRECONDITION`` error until the pending operation is - done (returns successfully or with error). - - Reading the database via the API continues to give the - pre-request values. - - Upon completion of the returned operation: - - - The new values are in effect and readable via the API. - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field becomes false. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>`` - and can be used to track the database modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.UpdateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase', - request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database'] - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - operations_pb2.Operation]: - r"""Return a callable for the update database ddl method over gRPC. - - Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``<database_name>/operations/<operation_id>`` and can be used to - track execution of the schema change(s).
The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - Returns: - Callable[[~.UpdateDatabaseDdlRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database_ddl' not in self._stubs: - self._stubs['update_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', - request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database_ddl'] - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - empty_pb2.Empty]: - r"""Return a callable for the drop database method over gRPC. - - Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - Returns: - Callable[[~.DropDatabaseRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'drop_database' not in self._stubs: - self._stubs['drop_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', - request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['drop_database'] - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - spanner_database_admin.GetDatabaseDdlResponse]: - r"""Return a callable for the get database ddl method over gRPC. - - Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - Returns: - Callable[[~.GetDatabaseDdlRequest], - ~.GetDatabaseDdlResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database_ddl' not in self._stubs: - self._stubs['get_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', - request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, - response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, - ) - return self._stubs['get_database_ddl'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on a database or backup resource. - Replaces any existing policy. 
- - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
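- # A hedged sketch of exercising this callable directly; the resource path - # and permission strings are hypothetical: - # - # response = transport.test_iam_permissions( - # iam_policy_pb2.TestIamPermissionsRequest( - # resource="projects/p/instances/i/databases/d", - # permissions=["spanner.databases.select"], - # ) - # ) - # # response.permissions holds the subset the caller actually has.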
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the create backup method over gRPC. - - Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - Returns: - Callable[[~.CreateBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup' not in self._stubs: - self._stubs['create_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', - request_serializer=gsad_backup.CreateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup'] - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the copy backup method over gRPC. - - Starts copying a Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`` - and can be used to track copying of the backup. The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - Returns: - Callable[[~.CopyBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
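- # The callable returns a raw google.longrunning Operation proto; callers - # typically poll it through ``self.operations_client``. A hedged sketch - # with hypothetical names: - # - # operation = transport.copy_backup(request) - # latest = transport.operations_client.get_operation(operation.name)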
- if 'copy_backup' not in self._stubs: - self._stubs['copy_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', - request_serializer=backup.CopyBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['copy_backup'] - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - backup.Backup]: - r"""Return a callable for the get backup method over gRPC. - - Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.GetBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', - request_serializer=backup.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - gsad_backup.Backup]: - r"""Return a callable for the update backup method over gRPC. - - Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.UpdateBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', - request_serializer=gsad_backup.UpdateBackupRequest.serialize, - response_deserializer=gsad_backup.Backup.deserialize, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.DeleteBackupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', - request_serializer=backup.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - backup.ListBackupsResponse]: - r"""Return a callable for the list backups method over gRPC. - - Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. 
- - Returns: - Callable[[~.ListBackupsRequest], - ~.ListBackupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', - request_serializer=backup.ListBackupsRequest.serialize, - response_deserializer=backup.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the restore database method over gRPC. - - Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - Returns: - Callable[[~.RestoreDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', - request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - spanner_database_admin.ListDatabaseOperationsResponse]: - r"""Return a callable for the list database operations method over gRPC. - - Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - Returns: - Callable[[~.ListDatabaseOperationsRequest], - ~.ListDatabaseOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_operations' not in self._stubs: - self._stubs['list_database_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', - request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, - ) - return self._stubs['list_database_operations'] - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - backup.ListBackupOperationsResponse]: - r"""Return a callable for the list backup operations method over gRPC. - - Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Returns: - Callable[[~.ListBackupOperationsRequest], - ~.ListBackupOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_operations' not in self._stubs: - self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', - request_serializer=backup.ListBackupOperationsRequest.serialize, - response_deserializer=backup.ListBackupOperationsResponse.deserialize, - ) - return self._stubs['list_backup_operations'] - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - spanner_database_admin.ListDatabaseRolesResponse]: - r"""Return a callable for the list database roles method over gRPC. - - Lists Cloud Spanner database roles. - - Returns: - Callable[[~.ListDatabaseRolesRequest], - ~.ListDatabaseRolesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_roles' not in self._stubs: - self._stubs['list_database_roles'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', - request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, - ) - return self._stubs['list_database_roles'] - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - spanner_database_admin.AddSplitPointsResponse]: - r"""Return a callable for the add split points method over gRPC. 
- - Adds split points to specified tables, indexes of a - database. - - Returns: - Callable[[~.AddSplitPointsRequest], - ~.AddSplitPointsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'add_split_points' not in self._stubs: - self._stubs['add_split_points'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints', - request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize, - response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize, - ) - return self._stubs['add_split_points'] - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a new backup schedule. - - Returns: - Callable[[~.CreateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_schedule' not in self._stubs: - self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', - request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['create_backup_schedule'] - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - backup_schedule.BackupSchedule]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets backup schedule for the input schedule name. - - Returns: - Callable[[~.GetBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_schedule' not in self._stubs: - self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', - request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, - response_deserializer=backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['get_backup_schedule'] - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
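# Illustrative aside (not part of the generated file): every property in this
# transport repeats the same memoized "stub function" pattern described in the
# comments above -- build the unary-unary callable once, cache it in
# ``self._stubs``, and hand back the cached callable on every later access. A
# minimal self-contained sketch of the pattern; the target address, RPC path,
# and message types below are placeholders, not part of this API:
import grpc
from google.protobuf import empty_pb2

class TinyTransport:
    def __init__(self, channel: grpc.Channel):
        self._channel = channel
        self._stubs = {}

    @property
    def ping(self):
        # Built on first access, reused on every access afterwards.
        if 'ping' not in self._stubs:
            self._stubs['ping'] = self._channel.unary_unary(
                '/example.v1.Example/Ping',  # hypothetical RPC path
                request_serializer=empty_pb2.Empty.SerializeToString,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['ping']

transport = TinyTransport(grpc.insecure_channel('localhost:50051'))
assert transport.ping is transport.ping  # cached, not rebuilt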
- if 'update_backup_schedule' not in self._stubs: - self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', - request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['update_backup_schedule'] - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete backup schedule method over gRPC. - - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_schedule' not in self._stubs: - self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', - request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup_schedule'] - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - backup_schedule.ListBackupSchedulesResponse]: - r"""Return a callable for the list backup schedules method over gRPC. - - Lists all the backup schedules for the database. - - Returns: - Callable[[~.ListBackupSchedulesRequest], - ~.ListBackupSchedulesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_schedules' not in self._stubs: - self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', - request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, - response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs['list_backup_schedules'] - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - spanner_database_admin.InternalUpdateGraphOperationResponse]: - r"""Return a callable for the internal update graph - operation method over gRPC. - - This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - Returns: - Callable[[~.InternalUpdateGraphOperationRequest], - ~.InternalUpdateGraphOperationResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
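# Illustrative aside (not part of the generated file): the stubs above mix two
# (de)serialization conventions. proto-plus request/response types expose
# classmethod-style ``serialize``/``deserialize`` helpers, while vanilla
# protobuf messages such as ``empty_pb2.Empty`` use
# ``SerializeToString``/``FromString``. A small sketch with a throwaway
# proto-plus message:
import proto
from google.protobuf import empty_pb2

class DemoRequest(proto.Message):
    name = proto.Field(proto.STRING, number=1)

wire = DemoRequest.serialize(DemoRequest(name="demo"))  # proto-plus -> bytes
msg = DemoRequest.deserialize(wire)                     # bytes -> proto-plus

raw = empty_pb2.Empty().SerializeToString()             # protobuf -> bytes
empty = empty_pb2.Empty.FromString(raw)                 # bytes -> protobuf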
- if 'internal_update_graph_operation' not in self._stubs: - self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation', - request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize, - response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize, - ) - return self._stubs['internal_update_graph_operation'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
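# Illustrative aside (not part of the generated file): the properties in this
# block expose the standard google.longrunning.Operations mixin alongside the
# service's own RPCs; ``delete_operation`` and ``cancel_operation`` carry no
# response body, hence ``response_deserializer=None``. A hedged sketch of
# polling one of them, assuming ``transport`` is an already-constructed
# instance of this class:
from google.longrunning import operations_pb2

def operation_done(transport, name: str) -> bool:
    """Return True once the named long-running operation has finished."""
    request = operations_pb2.GetOperationRequest(name=name)
    return transport.get_operation(request).done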
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DatabaseAdminGrpcTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py deleted file mode 100644 index 06cfaa6349..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,1656 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import DatabaseAdminGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata to a dict of str key/value pairs - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DatabaseAdminGrpcAsyncIOTransport(DatabaseAdminTransport): - """gRPC AsyncIO backend transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`.
This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - Awaitable[spanner_database_admin.ListDatabasesResponse]]: - r"""Return a callable for the list databases method over gRPC. - - Lists Cloud Spanner databases. - - Returns: - Callable[[~.ListDatabasesRequest], - Awaitable[~.ListDatabasesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', - request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create database method over gRPC. - - Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.CreateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
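# Illustrative aside (not part of the generated file): ``create_database``
# (wired up just below) resolves to a raw ``operations_pb2.Operation``; the
# ``operations_client`` property above is what higher layers use to poll it.
# A hedged sketch with a fixed five-second poll interval chosen purely for
# illustration:
import asyncio

async def create_and_wait(transport, request):
    operation = await transport.create_database(request)
    while not operation.done:
        await asyncio.sleep(5)
        operation = await transport.operations_client.get_operation(operation.name)
    return operation  # operation.response carries the Database, per the docstring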
- if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', - request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - Awaitable[spanner_database_admin.Database]]: - r"""Return a callable for the get database method over gRPC. - - Gets the state of a Cloud Spanner database. - - Returns: - Callable[[~.GetDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', - request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, - response_deserializer=spanner_database_admin.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update database method over gRPC. - - Updates a Cloud Spanner database. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the database. If the named database - does not exist, returns ``NOT_FOUND``. - - While the operation is pending: - - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field is set to true. - - Cancelling the operation is best-effort. If the cancellation - succeeds, the operation metadata's - [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] - is set, the updates are reverted, and the operation terminates - with a ``CANCELLED`` status. - - New UpdateDatabase requests will return a - ``FAILED_PRECONDITION`` error until the pending operation is - done (returns successfully or with error). - - Reading the database via the API continues to give the - pre-request values. - - Upon completion of the returned operation: - - - The new values are in effect and readable via the API. - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field becomes false. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format - ``projects//instances//databases//operations/`` - and can be used to track the database modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.UpdateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
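# Illustrative aside (not part of the generated file): unlike the synchronous
# transport earlier in this diff, every callable here returns an awaitable,
# so stubs are awaited directly. A minimal sketch against the
# ``get_database`` stub created just above:
from google.cloud.spanner_admin_database_v1.types import spanner_database_admin

async def fetch_database(transport, name: str):
    request = spanner_database_admin.GetDatabaseRequest(name=name)
    return await transport.get_database(request)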
- if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase', - request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database'] - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update database ddl method over gRPC. - - Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``/operations/`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - Returns: - Callable[[~.UpdateDatabaseDdlRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database_ddl' not in self._stubs: - self._stubs['update_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', - request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database_ddl'] - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the drop database method over gRPC. - - Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - Returns: - Callable[[~.DropDatabaseRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'drop_database' not in self._stubs: - self._stubs['drop_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', - request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['drop_database'] - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - Awaitable[spanner_database_admin.GetDatabaseDdlResponse]]: - r"""Return a callable for the get database ddl method over gRPC. - - Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. 
- - Returns: - Callable[[~.GetDatabaseDdlRequest], - Awaitable[~.GetDatabaseDdlResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database_ddl' not in self._stubs: - self._stubs['get_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', - request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, - response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, - ) - return self._stubs['get_database_ddl'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
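# Illustrative aside (not part of the generated file): the IAM callables in
# this block use the vanilla protobuf IAM types (note the
# ``SerializeToString`` serializers), not proto-plus wrappers. A hedged
# sketch of fetching a policy, subject to the permission requirements in the
# docstring above:
from google.iam.v1 import iam_policy_pb2

async def fetch_policy(transport, resource: str):
    # Returns an empty policy if the database or backup has none set.
    request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    return await transport.get_iam_policy(request)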
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create backup method over gRPC. - - Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - Returns: - Callable[[~.CreateBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
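# Illustrative aside (not part of the generated file): per the
# ``create_backup`` docstring immediately above, only one backup creation may
# be pending per database, while different databases can be backed up
# concurrently. A hedged sketch fanning out one creation per database; the
# naming scheme is invented, and a real request must also set the backup's
# ``expire_time``:
import asyncio
from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup

async def backup_each(transport, parent: str, database_ids):
    async def start(db_id):
        request = gsad_backup.CreateBackupRequest(
            parent=parent,
            backup_id=f"{db_id}-backup",
            backup={"database": f"{parent}/databases/{db_id}"},
        )
        return await transport.create_backup(request)  # one pending LRO per database
    return await asyncio.gather(*(start(d) for d in database_ids))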
- if 'create_backup' not in self._stubs: - self._stubs['create_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', - request_serializer=gsad_backup.CreateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup'] - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the copy backup method over gRPC. - - Starts copying a Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track copying of the backup. The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - Returns: - Callable[[~.CopyBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'copy_backup' not in self._stubs: - self._stubs['copy_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', - request_serializer=backup.CopyBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['copy_backup'] - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - Awaitable[backup.Backup]]: - r"""Return a callable for the get backup method over gRPC. - - Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.GetBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', - request_serializer=backup.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - Awaitable[gsad_backup.Backup]]: - r"""Return a callable for the update backup method over gRPC. - - Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.UpdateBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
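# Illustrative aside (not part of the generated file): ``update_backup``
# (wired up just below) follows the usual partial-update convention -- send
# the modified resource together with a field mask naming the paths to apply.
# A hedged sketch extending a backup's retention; the mask path is an
# assumption based on the ``expire_time`` field mentioned elsewhere in this
# diff:
from google.protobuf import field_mask_pb2
from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup

async def extend_expiry(transport, name: str, new_expire_time):
    request = gsad_backup.UpdateBackupRequest(
        backup={"name": name, "expire_time": new_expire_time},
        update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
    )
    return await transport.update_backup(request)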
- if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', - request_serializer=gsad_backup.UpdateBackupRequest.serialize, - response_deserializer=gsad_backup.Backup.deserialize, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.DeleteBackupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', - request_serializer=backup.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - Awaitable[backup.ListBackupsResponse]]: - r"""Return a callable for the list backups method over gRPC. - - Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - Returns: - Callable[[~.ListBackupsRequest], - Awaitable[~.ListBackupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', - request_serializer=backup.ListBackupsRequest.serialize, - response_deserializer=backup.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the restore database method over gRPC. - - Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. 
Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - Returns: - Callable[[~.RestoreDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', - request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]]: - r"""Return a callable for the list database operations method over gRPC. - - Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - Returns: - Callable[[~.ListDatabaseOperationsRequest], - Awaitable[~.ListDatabaseOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_operations' not in self._stubs: - self._stubs['list_database_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', - request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, - ) - return self._stubs['list_database_operations'] - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - Awaitable[backup.ListBackupOperationsResponse]]: - r"""Return a callable for the list backup operations method over gRPC. - - Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Returns: - Callable[[~.ListBackupOperationsRequest], - Awaitable[~.ListBackupOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_operations' not in self._stubs: - self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', - request_serializer=backup.ListBackupOperationsRequest.serialize, - response_deserializer=backup.ListBackupOperationsResponse.deserialize, - ) - return self._stubs['list_backup_operations'] - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - Awaitable[spanner_database_admin.ListDatabaseRolesResponse]]: - r"""Return a callable for the list database roles method over gRPC. - - Lists Cloud Spanner database roles. - - Returns: - Callable[[~.ListDatabaseRolesRequest], - Awaitable[~.ListDatabaseRolesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_roles' not in self._stubs: - self._stubs['list_database_roles'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', - request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, - ) - return self._stubs['list_database_roles'] - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - Awaitable[spanner_database_admin.AddSplitPointsResponse]]: - r"""Return a callable for the add split points method over gRPC. - - Adds split points to specified tables, indexes of a - database. - - Returns: - Callable[[~.AddSplitPointsRequest], - Awaitable[~.AddSplitPointsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'add_split_points' not in self._stubs: - self._stubs['add_split_points'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints', - request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize, - response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize, - ) - return self._stubs['add_split_points'] - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - Awaitable[gsad_backup_schedule.BackupSchedule]]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a new backup schedule. - - Returns: - Callable[[~.CreateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_backup_schedule' not in self._stubs: - self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', - request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['create_backup_schedule'] - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - Awaitable[backup_schedule.BackupSchedule]]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets backup schedule for the input schedule name. - - Returns: - Callable[[~.GetBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_schedule' not in self._stubs: - self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', - request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, - response_deserializer=backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['get_backup_schedule'] - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - Awaitable[gsad_backup_schedule.BackupSchedule]]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup_schedule' not in self._stubs: - self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', - request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['update_backup_schedule'] - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup schedule method over gRPC. - - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_backup_schedule' not in self._stubs: - self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', - request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup_schedule'] - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - Awaitable[backup_schedule.ListBackupSchedulesResponse]]: - r"""Return a callable for the list backup schedules method over gRPC. - - Lists all the backup schedules for the database. - - Returns: - Callable[[~.ListBackupSchedulesRequest], - Awaitable[~.ListBackupSchedulesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_schedules' not in self._stubs: - self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', - request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, - response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs['list_backup_schedules'] - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse]]: - r"""Return a callable for the internal update graph - operation method over gRPC. - - This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - Returns: - Callable[[~.InternalUpdateGraphOperationRequest], - Awaitable[~.InternalUpdateGraphOperationResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
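# Illustrative aside (not part of the generated file): the
# ``_prep_wrapped_messages`` body below wraps most RPCs in an ``AsyncRetry``
# with exponential backoff, retrying only DEADLINE_EXCEEDED and UNAVAILABLE.
# A self-contained sketch of that same policy applied to an arbitrary
# coroutine, using only the modules this file already imports:
from google.api_core import exceptions as core_exceptions
from google.api_core import retry_async as retries

retry_policy = retries.AsyncRetry(
    initial=1.0,      # first delay of about one second,
    maximum=32.0,     # capped at 32 seconds,
    multiplier=1.3,   # growing roughly 30% per attempt (with jitter)
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=3600.0,  # give up entirely after one hour
)

@retry_policy
async def flaky_rpc():
    ...  # any awaitable RPC; non-matching errors propagate immediately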
- if 'internal_update_graph_operation' not in self._stubs: - self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation', - request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize, - response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize, - ) - return self._stubs['internal_update_graph_operation'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_databases: self._wrap_method( - self.list_databases, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_database: self._wrap_method( - self.create_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database: self._wrap_method( - self.get_database, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database: self._wrap_method( - self.update_database, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database_ddl: self._wrap_method( - self.update_database_ddl, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.drop_database: self._wrap_method( - self.drop_database, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database_ddl: self._wrap_method( - self.get_database_ddl, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.create_backup: self._wrap_method( - self.create_backup, - 
default_timeout=3600.0, - client_info=client_info, - ), - self.copy_backup: self._wrap_method( - self.copy_backup, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup: self._wrap_method( - self.get_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup: self._wrap_method( - self.update_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup: self._wrap_method( - self.delete_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backups: self._wrap_method( - self.list_backups, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.restore_database: self._wrap_method( - self.restore_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_operations: self._wrap_method( - self.list_database_operations, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_operations: self._wrap_method( - self.list_backup_operations, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_roles: self._wrap_method( - self.list_database_roles, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.add_split_points: self._wrap_method( - self.add_split_points, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_backup_schedule: self._wrap_method( - self.create_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup_schedule: self._wrap_method( - 
self.get_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup_schedule: self._wrap_method( - self.update_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup_schedule: self._wrap_method( - self.delete_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_schedules: self._wrap_method( - self.list_backup_schedules, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.internal_update_graph_operation: self._wrap_method( - self.internal_update_graph_operation, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
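# [Editorial sketch -- not part of the generated file] The AsyncRetry policy
# repeated throughout _prep_wrapped_messages above means: wait 1.0s, grow the
# delay by 1.3x per attempt (capped at 32s), retry only on DeadlineExceeded /
# ServiceUnavailable, and give up after 3600s overall. Equivalent standalone
# usage, mirroring those values (illustrative only):
from google.api_core import exceptions as core_exceptions
from google.api_core.retry import AsyncRetry, if_exception_type

backoff = AsyncRetry(
    initial=1.0,        # first delay, in seconds
    maximum=32.0,       # cap on any single delay
    multiplier=1.3,     # exponential growth factor between attempts
    predicate=if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=3600.0,    # total time budget before giving up
)
# Wrapping an async callable: `await backoff(rpc)(request)` retries `rpc`
# under the policy above.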
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'DatabaseAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py deleted file mode 100644 index 012c64468c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py +++ /dev/null @@ -1,5336 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseDatabaseAdminRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DatabaseAdminRestInterceptor: - """Interceptor for DatabaseAdmin. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DatabaseAdminRestTransport. - - .. 
code-block:: python - class MyCustomDatabaseAdminInterceptor(DatabaseAdminRestInterceptor): - def pre_add_split_points(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_add_split_points(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_copy_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_copy_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_drop_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database_ddl(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database_ddl(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_internal_update_graph_operation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_internal_update_graph_operation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backup_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backups(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backups(self, response): - 
logging.log(f"Received response: {response}") - return response - - def pre_list_backup_schedules(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_schedules(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_database_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_database_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_database_roles(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_database_roles(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_databases(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_databases(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_restore_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_restore_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_set_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_set_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_test_iam_permissions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_test_iam_permissions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database_ddl(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database_ddl(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DatabaseAdminRestTransport(interceptor=MyCustomDatabaseAdminInterceptor()) - client = DatabaseAdminClient(transport=transport) - - - """ - def pre_add_split_points(self, request: spanner_database_admin.AddSplitPointsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for add_split_points - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_add_split_points(self, response: spanner_database_admin.AddSplitPointsResponse) -> spanner_database_admin.AddSplitPointsResponse: - """Post-rpc interceptor for add_split_points - - DEPRECATED. 
Please use the `post_add_split_points_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_add_split_points` interceptor runs - before the `post_add_split_points_with_metadata` interceptor. - """ - return response - - def post_add_split_points_with_metadata(self, response: spanner_database_admin.AddSplitPointsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for add_split_points - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_add_split_points_with_metadata` - interceptor in new development instead of the `post_add_split_points` interceptor. - When both interceptors are used, this `post_add_split_points_with_metadata` interceptor runs after the - `post_add_split_points` interceptor. The (possibly modified) response returned by - `post_add_split_points` will be passed to - `post_add_split_points_with_metadata`. - """ - return response, metadata - - def pre_copy_backup(self, request: backup.CopyBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.CopyBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for copy_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_copy_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for copy_backup - - DEPRECATED. Please use the `post_copy_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_copy_backup` interceptor runs - before the `post_copy_backup_with_metadata` interceptor. - """ - return response - - def post_copy_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for copy_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_copy_backup_with_metadata` - interceptor in new development instead of the `post_copy_backup` interceptor. - When both interceptors are used, this `post_copy_backup_with_metadata` interceptor runs after the - `post_copy_backup` interceptor. The (possibly modified) response returned by - `post_copy_backup` will be passed to - `post_copy_backup_with_metadata`. - """ - return response, metadata - - def pre_create_backup(self, request: gsad_backup.CreateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.CreateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. 
- """ - return request, metadata - - def post_create_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_backup - - DEPRECATED. Please use the `post_create_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_create_backup` interceptor runs - before the `post_create_backup_with_metadata` interceptor. - """ - return response - - def post_create_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_create_backup_with_metadata` - interceptor in new development instead of the `post_create_backup` interceptor. - When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the - `post_create_backup` interceptor. The (possibly modified) response returned by - `post_create_backup` will be passed to - `post_create_backup_with_metadata`. - """ - return response, metadata - - def pre_create_backup_schedule(self, request: gsad_backup_schedule.CreateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.CreateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_create_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: - """Post-rpc interceptor for create_backup_schedule - - DEPRECATED. Please use the `post_create_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_create_backup_schedule` interceptor runs - before the `post_create_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_create_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_create_backup_schedule_with_metadata` - interceptor in new development instead of the `post_create_backup_schedule` interceptor. - When both interceptors are used, this `post_create_backup_schedule_with_metadata` interceptor runs after the - `post_create_backup_schedule` interceptor. The (possibly modified) response returned by - `post_create_backup_schedule` will be passed to - `post_create_backup_schedule_with_metadata`. 
- """ - return response, metadata - - def pre_create_database(self, request: spanner_database_admin.CreateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_create_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_database - - DEPRECATED. Please use the `post_create_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_create_database` interceptor runs - before the `post_create_database_with_metadata` interceptor. - """ - return response - - def post_create_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_create_database_with_metadata` - interceptor in new development instead of the `post_create_database` interceptor. - When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the - `post_create_database` interceptor. The (possibly modified) response returned by - `post_create_database` will be passed to - `post_create_database_with_metadata`. - """ - return response, metadata - - def pre_delete_backup(self, request: backup.DeleteBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def pre_delete_backup_schedule(self, request: backup_schedule.DeleteBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.DeleteBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def pre_drop_database(self, request: spanner_database_admin.DropDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.DropDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for drop_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def pre_get_backup(self, request: backup.GetBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. 
- """ - return request, metadata - - def post_get_backup(self, response: backup.Backup) -> backup.Backup: - """Post-rpc interceptor for get_backup - - DEPRECATED. Please use the `post_get_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_backup` interceptor runs - before the `post_get_backup_with_metadata` interceptor. - """ - return response - - def post_get_backup_with_metadata(self, response: backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_backup_with_metadata` - interceptor in new development instead of the `post_get_backup` interceptor. - When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the - `post_get_backup` interceptor. The (possibly modified) response returned by - `post_get_backup` will be passed to - `post_get_backup_with_metadata`. - """ - return response, metadata - - def pre_get_backup_schedule(self, request: backup_schedule.GetBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.GetBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_backup_schedule(self, response: backup_schedule.BackupSchedule) -> backup_schedule.BackupSchedule: - """Post-rpc interceptor for get_backup_schedule - - DEPRECATED. Please use the `post_get_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_backup_schedule` interceptor runs - before the `post_get_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_get_backup_schedule_with_metadata(self, response: backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_backup_schedule_with_metadata` - interceptor in new development instead of the `post_get_backup_schedule` interceptor. - When both interceptors are used, this `post_get_backup_schedule_with_metadata` interceptor runs after the - `post_get_backup_schedule` interceptor. The (possibly modified) response returned by - `post_get_backup_schedule` will be passed to - `post_get_backup_schedule_with_metadata`. 
- """ - return response, metadata - - def pre_get_database(self, request: spanner_database_admin.GetDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_database(self, response: spanner_database_admin.Database) -> spanner_database_admin.Database: - """Post-rpc interceptor for get_database - - DEPRECATED. Please use the `post_get_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_database` interceptor runs - before the `post_get_database_with_metadata` interceptor. - """ - return response - - def post_get_database_with_metadata(self, response: spanner_database_admin.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_database_with_metadata` - interceptor in new development instead of the `post_get_database` interceptor. - When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the - `post_get_database` interceptor. The (possibly modified) response returned by - `post_get_database` will be passed to - `post_get_database_with_metadata`. - """ - return response, metadata - - def pre_get_database_ddl(self, request: spanner_database_admin.GetDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_database_ddl - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_database_ddl(self, response: spanner_database_admin.GetDatabaseDdlResponse) -> spanner_database_admin.GetDatabaseDdlResponse: - """Post-rpc interceptor for get_database_ddl - - DEPRECATED. Please use the `post_get_database_ddl_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_database_ddl` interceptor runs - before the `post_get_database_ddl_with_metadata` interceptor. - """ - return response - - def post_get_database_ddl_with_metadata(self, response: spanner_database_admin.GetDatabaseDdlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_database_ddl - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_database_ddl_with_metadata` - interceptor in new development instead of the `post_get_database_ddl` interceptor. 
- When both interceptors are used, this `post_get_database_ddl_with_metadata` interceptor runs after the - `post_get_database_ddl` interceptor. The (possibly modified) response returned by - `post_get_database_ddl` will be passed to - `post_get_database_ddl_with_metadata`. - """ - return response, metadata - - def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - DEPRECATED. Please use the `post_get_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_iam_policy` interceptor runs - before the `post_get_iam_policy_with_metadata` interceptor. - """ - return response - - def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_iam_policy_with_metadata` - interceptor in new development instead of the `post_get_iam_policy` interceptor. - When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the - `post_get_iam_policy` interceptor. The (possibly modified) response returned by - `post_get_iam_policy` will be passed to - `post_get_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_list_backup_operations(self, request: backup.ListBackupOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backup_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_backup_operations(self, response: backup.ListBackupOperationsResponse) -> backup.ListBackupOperationsResponse: - """Post-rpc interceptor for list_backup_operations - - DEPRECATED. Please use the `post_list_backup_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_backup_operations` interceptor runs - before the `post_list_backup_operations_with_metadata` interceptor. - """ - return response - - def post_list_backup_operations_with_metadata(self, response: backup.ListBackupOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backup_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. 
- - We recommend only using this `post_list_backup_operations_with_metadata` - interceptor in new development instead of the `post_list_backup_operations` interceptor. - When both interceptors are used, this `post_list_backup_operations_with_metadata` interceptor runs after the - `post_list_backup_operations` interceptor. The (possibly modified) response returned by - `post_list_backup_operations` will be passed to - `post_list_backup_operations_with_metadata`. - """ - return response, metadata - - def pre_list_backups(self, request: backup.ListBackupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backups - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_backups(self, response: backup.ListBackupsResponse) -> backup.ListBackupsResponse: - """Post-rpc interceptor for list_backups - - DEPRECATED. Please use the `post_list_backups_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_backups` interceptor runs - before the `post_list_backups_with_metadata` interceptor. - """ - return response - - def post_list_backups_with_metadata(self, response: backup.ListBackupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backups - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_backups_with_metadata` - interceptor in new development instead of the `post_list_backups` interceptor. - When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the - `post_list_backups` interceptor. The (possibly modified) response returned by - `post_list_backups` will be passed to - `post_list_backups_with_metadata`. - """ - return response, metadata - - def pre_list_backup_schedules(self, request: backup_schedule.ListBackupSchedulesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backup_schedules - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_backup_schedules(self, response: backup_schedule.ListBackupSchedulesResponse) -> backup_schedule.ListBackupSchedulesResponse: - """Post-rpc interceptor for list_backup_schedules - - DEPRECATED. Please use the `post_list_backup_schedules_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_backup_schedules` interceptor runs - before the `post_list_backup_schedules_with_metadata` interceptor. 
- """ - return response - - def post_list_backup_schedules_with_metadata(self, response: backup_schedule.ListBackupSchedulesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backup_schedules - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_backup_schedules_with_metadata` - interceptor in new development instead of the `post_list_backup_schedules` interceptor. - When both interceptors are used, this `post_list_backup_schedules_with_metadata` interceptor runs after the - `post_list_backup_schedules` interceptor. The (possibly modified) response returned by - `post_list_backup_schedules` will be passed to - `post_list_backup_schedules_with_metadata`. - """ - return response, metadata - - def pre_list_database_operations(self, request: spanner_database_admin.ListDatabaseOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_database_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_database_operations(self, response: spanner_database_admin.ListDatabaseOperationsResponse) -> spanner_database_admin.ListDatabaseOperationsResponse: - """Post-rpc interceptor for list_database_operations - - DEPRECATED. Please use the `post_list_database_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_database_operations` interceptor runs - before the `post_list_database_operations_with_metadata` interceptor. - """ - return response - - def post_list_database_operations_with_metadata(self, response: spanner_database_admin.ListDatabaseOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_database_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_database_operations_with_metadata` - interceptor in new development instead of the `post_list_database_operations` interceptor. - When both interceptors are used, this `post_list_database_operations_with_metadata` interceptor runs after the - `post_list_database_operations` interceptor. The (possibly modified) response returned by - `post_list_database_operations` will be passed to - `post_list_database_operations_with_metadata`. - """ - return response, metadata - - def pre_list_database_roles(self, request: spanner_database_admin.ListDatabaseRolesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_database_roles - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. 
- """ - return request, metadata - - def post_list_database_roles(self, response: spanner_database_admin.ListDatabaseRolesResponse) -> spanner_database_admin.ListDatabaseRolesResponse: - """Post-rpc interceptor for list_database_roles - - DEPRECATED. Please use the `post_list_database_roles_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_database_roles` interceptor runs - before the `post_list_database_roles_with_metadata` interceptor. - """ - return response - - def post_list_database_roles_with_metadata(self, response: spanner_database_admin.ListDatabaseRolesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_database_roles - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_database_roles_with_metadata` - interceptor in new development instead of the `post_list_database_roles` interceptor. - When both interceptors are used, this `post_list_database_roles_with_metadata` interceptor runs after the - `post_list_database_roles` interceptor. The (possibly modified) response returned by - `post_list_database_roles` will be passed to - `post_list_database_roles_with_metadata`. - """ - return response, metadata - - def pre_list_databases(self, request: spanner_database_admin.ListDatabasesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_databases - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_databases(self, response: spanner_database_admin.ListDatabasesResponse) -> spanner_database_admin.ListDatabasesResponse: - """Post-rpc interceptor for list_databases - - DEPRECATED. Please use the `post_list_databases_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_databases` interceptor runs - before the `post_list_databases_with_metadata` interceptor. - """ - return response - - def post_list_databases_with_metadata(self, response: spanner_database_admin.ListDatabasesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_databases - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_databases_with_metadata` - interceptor in new development instead of the `post_list_databases` interceptor. - When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the - `post_list_databases` interceptor. The (possibly modified) response returned by - `post_list_databases` will be passed to - `post_list_databases_with_metadata`. 
- """ - return response, metadata - - def pre_restore_database(self, request: spanner_database_admin.RestoreDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.RestoreDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for restore_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_restore_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for restore_database - - DEPRECATED. Please use the `post_restore_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_restore_database` interceptor runs - before the `post_restore_database_with_metadata` interceptor. - """ - return response - - def post_restore_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for restore_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_restore_database_with_metadata` - interceptor in new development instead of the `post_restore_database` interceptor. - When both interceptors are used, this `post_restore_database_with_metadata` interceptor runs after the - `post_restore_database` interceptor. The (possibly modified) response returned by - `post_restore_database` will be passed to - `post_restore_database_with_metadata`. - """ - return response, metadata - - def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - DEPRECATED. Please use the `post_set_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_set_iam_policy` interceptor runs - before the `post_set_iam_policy_with_metadata` interceptor. - """ - return response - - def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_set_iam_policy_with_metadata` - interceptor in new development instead of the `post_set_iam_policy` interceptor. - When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the - `post_set_iam_policy` interceptor. 
The (possibly modified) response returned by - `post_set_iam_policy` will be passed to - `post_set_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_test_iam_permissions` interceptor runs - before the `post_test_iam_permissions_with_metadata` interceptor. - """ - return response - - def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_test_iam_permissions_with_metadata` - interceptor in new development instead of the `post_test_iam_permissions` interceptor. - When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the - `post_test_iam_permissions` interceptor. The (possibly modified) response returned by - `post_test_iam_permissions` will be passed to - `post_test_iam_permissions_with_metadata`. - """ - return response, metadata - - def pre_update_backup(self, request: gsad_backup.UpdateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.UpdateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_backup(self, response: gsad_backup.Backup) -> gsad_backup.Backup: - """Post-rpc interceptor for update_backup - - DEPRECATED. Please use the `post_update_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_backup` interceptor runs - before the `post_update_backup_with_metadata` interceptor. - """ - return response - - def post_update_backup_with_metadata(self, response: gsad_backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. 
- - We recommend only using this `post_update_backup_with_metadata` - interceptor in new development instead of the `post_update_backup` interceptor. - When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the - `post_update_backup` interceptor. The (possibly modified) response returned by - `post_update_backup` will be passed to - `post_update_backup_with_metadata`. - """ - return response, metadata - - def pre_update_backup_schedule(self, request: gsad_backup_schedule.UpdateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.UpdateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: - """Post-rpc interceptor for update_backup_schedule - - DEPRECATED. Please use the `post_update_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_backup_schedule` interceptor runs - before the `post_update_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_update_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_update_backup_schedule_with_metadata` - interceptor in new development instead of the `post_update_backup_schedule` interceptor. - When both interceptors are used, this `post_update_backup_schedule_with_metadata` interceptor runs after the - `post_update_backup_schedule` interceptor. The (possibly modified) response returned by - `post_update_backup_schedule` will be passed to - `post_update_backup_schedule_with_metadata`. - """ - return response, metadata - - def pre_update_database(self, request: spanner_database_admin.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_database - - DEPRECATED. Please use the `post_update_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_database` interceptor runs - before the `post_update_database_with_metadata` interceptor. 
- """ - return response - - def post_update_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_update_database_with_metadata` - interceptor in new development instead of the `post_update_database` interceptor. - When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the - `post_update_database` interceptor. The (possibly modified) response returned by - `post_update_database` will be passed to - `post_update_database_with_metadata`. - """ - return response, metadata - - def pre_update_database_ddl(self, request: spanner_database_admin.UpdateDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_database_ddl - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_database_ddl(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_database_ddl - - DEPRECATED. Please use the `post_update_database_ddl_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_database_ddl` interceptor runs - before the `post_update_database_ddl_with_metadata` interceptor. - """ - return response - - def post_update_database_ddl_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_database_ddl - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_update_database_ddl_with_metadata` - interceptor in new development instead of the `post_update_database_ddl` interceptor. - When both interceptors are used, this `post_update_database_ddl_with_metadata` interceptor runs after the - `post_update_database_ddl` interceptor. The (possibly modified) response returned by - `post_update_database_ddl` will be passed to - `post_update_database_ddl_with_metadata`. - """ - return response, metadata - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DatabaseAdminRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DatabaseAdminRestInterceptor - - -class DatabaseAdminRestTransport(_BaseDatabaseAdminRestTransport): - """REST backend synchronous transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DatabaseAdminRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self-signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DatabaseAdminRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one.
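As a construction sketch (the interceptor class is the hypothetical one sketched earlier; per the deprecation note above, pass a `credentials` object rather than `credentials_file`):

from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminClient

# Wire a custom interceptor into the REST transport, then hand the
# transport to the client so every RPC flows through the hooks.
transport = DatabaseAdminRestTransport(
    host='spanner.googleapis.com',
    interceptor=LoggingDatabaseAdminRestInterceptor(),
)
client = DatabaseAdminClient(transport=transport)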
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _AddSplitPoints(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.AddSplitPoints") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.AddSplitPointsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.AddSplitPointsResponse: - r"""Call the add split points method over HTTP. - - Args: - request (~.spanner_database_admin.AddSplitPointsRequest): - The request object. The request for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
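A usage sketch for the cached operations client (the operation name is a placeholder; `get_operation` is the standard `google.api_core.operations_v1` surface, and the URI patterns it hits are the `http_options` listed above):

# Poll a database-level long-running operation by resource name.
op = transport.operations_client.get_operation(
    name="projects/my-project/instances/my-instance/databases/my-db/operations/my-op"
)
if not op.done:
    pass  # poll again later; repeated property access reuses the same cached client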
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.AddSplitPointsResponse: - The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_http_options() - - request, metadata = self._interceptor.pre_add_split_points(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.AddSplitPoints", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "AddSplitPoints", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._AddSplitPoints._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.AddSplitPointsResponse() - pb_resp = spanner_database_admin.AddSplitPointsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_add_split_points(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_add_split_points_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.AddSplitPointsResponse.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.add_split_points", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "AddSplitPoints", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CopyBackup(_BaseDatabaseAdminRestTransport._BaseCopyBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CopyBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backup.CopyBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the copy backup method over HTTP. - - Args: - request (~.backup.CopyBackupRequest): - The request object. The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call.
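Because every status >= 400 is rethrown via `core_exceptions.from_http_response`, callers never see raw HTTP errors. A hedged calling sketch (the resource name and flattened argument are illustrative):

from google.api_core import exceptions as core_exceptions

try:
    client.add_split_points(database="projects/p/instances/i/databases/d")
except core_exceptions.NotFound:
    pass  # from_http_response maps 404 -> NotFound, 409 -> Conflict, etc.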
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_http_options() - - request, metadata = self._interceptor.pre_copy_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CopyBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CopyBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CopyBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_copy_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_copy_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.copy_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CopyBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateBackup(_BaseDatabaseAdminRestTransport._BaseCreateBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CreateBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup.CreateBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the 
create backup method over HTTP. - - Args: - request (~.gsad_backup.CreateBackupRequest): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_http_options() - - request, metadata = self._interceptor.pre_create_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CreateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CreateBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup_schedule.CreateBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Call the create backup schedule method over HTTP. - - Args: - request (~.gsad_backup_schedule.CreateBackupScheduleRequest): - The request object. The request for - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gsad_backup_schedule.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. 
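To illustrate the metadata contract these docstrings repeat (key names and payload are hypothetical): each value is a `str`, except for keys ending in `-bin`, whose values must be `bytes`:

# Using the module alias from this file for the request type.
request = gsad_backup_schedule.CreateBackupScheduleRequest(
    parent="projects/p/instances/i/databases/d",
)
metadata = (
    ("x-goog-request-params", "parent=projects/p/instances/i/databases/d"),
    ("x-example-trace-bin", b"\x0a\x02"),  # "-bin" suffix, so the value is bytes
)
client.create_backup_schedule(request=request, metadata=metadata)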
Next ID: 10 - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_create_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CreateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gsad_backup_schedule.BackupSchedule() - pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_schedule_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gsad_backup_schedule.BackupSchedule.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup_schedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDatabase(_BaseDatabaseAdminRestTransport._BaseCreateDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CreateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self,
- request: spanner_database_admin.CreateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create database method over HTTP. - - Args: - request (~.spanner_database_admin.CreateDatabaseRequest): - The request object. The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_create_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CreateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBackup(_BaseDatabaseAdminRestTransport._BaseDeleteBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DeleteBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.DeleteBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete backup method over HTTP. - - Args: - request (~.backup.DeleteBackupRequest): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
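The deletion-style calls below (`delete_backup`, `delete_backup_schedule`, `drop_database`) return nothing on success; failures still surface through `from_http_response`. A small sketch (backup name hypothetical):

try:
    client.delete_backup(name="projects/p/instances/i/backups/b")
except core_exceptions.NotFound:
    pass  # the backup was already gone; nothing to clean up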
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DeleteBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DeleteBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteBackupSchedule(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DeleteBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup_schedule.DeleteBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete backup schedule method over HTTP. - - Args: - request (~.backup_schedule.DeleteBackupScheduleRequest): - The request object. The request for - [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DeleteBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DeleteBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DropDatabase(_BaseDatabaseAdminRestTransport._BaseDropDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DropDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.DropDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the drop database method over HTTP. - - Args: - request (~.spanner_database_admin.DropDatabaseRequest): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_http_options() - - request, metadata = self._interceptor.pre_drop_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DropDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DropDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DropDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetBackup(_BaseDatabaseAdminRestTransport._BaseGetBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.GetBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup.Backup: - r"""Call the get backup method over HTTP. - - Args: - request (~.backup.GetBackupRequest): - The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup.Backup: - A backup of a Cloud Spanner database. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_http_options() - - request, metadata = self._interceptor.pre_get_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.Backup() - pb_resp = backup.Backup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup.Backup.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackupSchedule(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup_schedule.GetBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup_schedule.BackupSchedule: - r"""Call the get backup schedule method over HTTP. 
- - Args: - request (~.backup_schedule.GetBackupScheduleRequest): - The request object. The request for - [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup_schedule.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_get_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup_schedule.BackupSchedule() - pb_resp = backup_schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_schedule_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup_schedule.BackupSchedule.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup_schedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDatabase(_BaseDatabaseAdminRestTransport._BaseGetDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.GetDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.Database: - r"""Call the get database method over HTTP. - - Args: - request (~.spanner_database_admin.GetDatabaseRequest): - The request object. The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.Database: - A Cloud Spanner database. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_http_options() - - request, metadata = self._interceptor.pre_get_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.Database() - pb_resp = spanner_database_admin.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetDatabaseDdl") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.GetDatabaseDdlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - 
r"""Call the get database ddl method over HTTP. - - Args: - request (~.spanner_database_admin.GetDatabaseDdlRequest): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_http_options() - - request, metadata = self._interceptor.pre_get_database_ddl(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabaseDdl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabaseDdl", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.GetDatabaseDdlResponse() - pb_resp = spanner_database_admin.GetDatabaseDdlResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_database_ddl(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_database_ddl_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.GetDatabaseDdlResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database_ddl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabaseDdl", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetIamPolicy(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support
- conditions in their IAM policies, see the `IAM
- documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
-
- **JSON example:**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": [
- "user:eve@example.com"
- ],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ],
- "etag": "BwWWja0YfJA=",
- "version": 3
- }
-
- **YAML example:**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
- etag: BwWWja0YfJA=
- version: 3
-
- For a description of IAM and its features, see the `IAM
- documentation <https://cloud.google.com/iam/docs/>`__.
-
- """
-
- http_options = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_http_options()
-
- request, metadata = self._interceptor.pre_get_iam_policy(request, metadata)
- transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request)
-
- body = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request)
-
- # Jsonify the query params
- query_params = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = json_format.MessageToJson(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetIamPolicy",
- extra = {
- "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
- "rpcName": "GetIamPolicy",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = DatabaseAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
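
GetIamPolicy differs from the GET-style handlers above in one respect: it transcodes to a verb with a body, so `_get_request_body_json` serializes the request and `_get_response` forwards it as `data=body`. A minimal sketch of that branch, with hypothetical names::

    from typing import Optional
    import requests

    def send_with_body(session: requests.Session, url: str, method: str,
                       body: Optional[str], params: dict,
                       timeout: float) -> requests.Response:
        kwargs = dict(
            timeout=timeout,
            headers={"Content-Type": "application/json"},
            params=params,
        )
        if body is not None:
            # POST/PATCH-style handlers attach the JSON-encoded request
            # message; GET-style handlers leave body as None and omit it.
            kwargs["data"] = body
        return getattr(session, method)(url, **kwargs)
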
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_iam_policy", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _InternalUpdateGraphOperation(_BaseDatabaseAdminRestTransport._BaseInternalUpdateGraphOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.InternalUpdateGraphOperation") - - def __call__(self, - request: spanner_database_admin.InternalUpdateGraphOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: - raise NotImplementedError( - "Method InternalUpdateGraphOperation is not available over REST transport" - ) - class _ListBackupOperations(_BaseDatabaseAdminRestTransport._BaseListBackupOperations, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListBackupOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.ListBackupOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup.ListBackupOperationsResponse: - r"""Call the list backup operations method over HTTP. - - Args: - request (~.backup.ListBackupOperationsRequest): - The request object. The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup.ListBackupOperationsResponse: - The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_backup_operations(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupOperations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListBackupOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.ListBackupOperationsResponse() - pb_resp = backup.ListBackupOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup.ListBackupOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_operations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackups(_BaseDatabaseAdminRestTransport._BaseListBackups, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListBackups") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.ListBackupsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup.ListBackupsResponse: - r"""Call the list backups method over HTTP. - - Args: - request (~.backup.ListBackupsRequest): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup.ListBackupsResponse: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListBackups._get_http_options() - - request, metadata = self._interceptor.pre_list_backups(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackups._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListBackups._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackups", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackups", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListBackups._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
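
ListBackups is a paginated RPC: the parsed response carries `next_page_token`, and the generated client wraps this handler in a pager (see `pagers.py` in this same patch) so iteration refetches pages transparently. Typical call-site usage, assuming default credentials and a placeholder instance name::

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()
    # The pager lazily re-invokes this handler with each response's
    # next_page_token until the token comes back empty.
    for bkp in client.list_backups(parent="projects/p/instances/i"):
        print(bkp.name, bkp.state)
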
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.ListBackupsResponse() - pb_resp = backup.ListBackupsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backups(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backups_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup.ListBackupsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backups", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackups", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackupSchedules(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListBackupSchedules") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup_schedule.ListBackupSchedulesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup_schedule.ListBackupSchedulesResponse: - r"""Call the list backup schedules method over HTTP. - - Args: - request (~.backup_schedule.ListBackupSchedulesRequest): - The request object. The request for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup_schedule.ListBackupSchedulesResponse: - The response for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_http_options() - - request, metadata = self._interceptor.pre_list_backup_schedules(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupSchedules", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupSchedules", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListBackupSchedules._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup_schedule.ListBackupSchedulesResponse() - pb_resp = backup_schedule.ListBackupSchedulesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_schedules(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_schedules_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup_schedule.ListBackupSchedulesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_schedules", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupSchedules", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabaseOperations(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListDatabaseOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.ListDatabaseOperationsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.ListDatabaseOperationsResponse: - r"""Call the list database operations method over HTTP. - - Args: - request (~.spanner_database_admin.ListDatabaseOperationsRequest): - The request object. The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.ListDatabaseOperationsResponse: - The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_database_operations(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseOperations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListDatabaseOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
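
Note how every handler brackets the wire call with interceptor hooks: `pre_<rpc>` may rewrite the request and metadata before transcoding, while `post_<rpc>` and `post_<rpc>_with_metadata` may rewrite the parsed response. A hedged sketch of plugging in a custom interceptor (class names as generated in this file; construction falls back to application default credentials)::

    from google.cloud.spanner_admin_database_v1.services.database_admin.transports.rest import (
        DatabaseAdminRestInterceptor,
        DatabaseAdminRestTransport,
    )

    class StampingInterceptor(DatabaseAdminRestInterceptor):
        def pre_list_database_operations(self, request, metadata):
            # illustrative only: add a header before the request is transcoded
            return request, list(metadata) + [("x-example-header", "1")]

        def post_list_database_operations(self, response):
            # inspect or rewrite the parsed response before the client sees it
            return response

    transport = DatabaseAdminRestTransport(interceptor=StampingInterceptor())
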
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.ListDatabaseOperationsResponse() - pb_resp = spanner_database_admin.ListDatabaseOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_database_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_database_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.ListDatabaseOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_operations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabaseRoles(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListDatabaseRoles") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.ListDatabaseRolesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.ListDatabaseRolesResponse: - r"""Call the list database roles method over HTTP. - - Args: - request (~.spanner_database_admin.ListDatabaseRolesRequest): - The request object. The request for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.ListDatabaseRolesResponse: - The response for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_http_options() - - request, metadata = self._interceptor.pre_list_database_roles(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseRoles", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseRoles", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListDatabaseRoles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.ListDatabaseRolesResponse() - pb_resp = spanner_database_admin.ListDatabaseRolesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_database_roles(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_database_roles_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.ListDatabaseRolesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_roles", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseRoles", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabases(_BaseDatabaseAdminRestTransport._BaseListDatabases, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListDatabases") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.ListDatabasesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, 
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.ListDatabasesResponse: - r"""Call the list databases method over HTTP. - - Args: - request (~.spanner_database_admin.ListDatabasesRequest): - The request object. The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.ListDatabasesResponse: - The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_http_options() - - request, metadata = self._interceptor.pre_list_databases(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabases", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabases", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListDatabases._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
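
All of the `_LOGGER.debug` blocks in this transport are double-gated: they run only when the installed `google-api-core` exposes client logging (`CLIENT_LOGGING_SUPPORTED`) and this module's logger is enabled for DEBUG. To surface the structured `httpRequest`/`httpResponse` records, raising the logger level should suffice (the logger name below is assumed from this file's package path)::

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger(
        "google.cloud.spanner_admin_database_v1.services.database_admin.transports.rest"
    ).setLevel(logging.DEBUG)
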
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.ListDatabasesResponse() - pb_resp = spanner_database_admin.ListDatabasesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_databases(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_databases_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.ListDatabasesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_databases", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabases", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RestoreDatabase(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.RestoreDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.RestoreDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the restore database method over HTTP. - - Args: - request (~.spanner_database_admin.RestoreDatabaseRequest): - The request object. The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
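
RestoreDatabase is a long-running operation: this handler returns the raw `operations_pb2.Operation`, which the client layer wraps in an `api_core` operation future. Typical usage at the GAPIC surface, with placeholder resource names::

    operation = client.restore_database(
        parent="projects/p/instances/i",
        database_id="restored-db",
        backup="projects/p/instances/i/backups/b",
    )
    # result() polls the operation until it completes, then returns the
    # restored Database message (or raises the operation's error).
    database = operation.result(timeout=3600)
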
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_http_options() - - request, metadata = self._interceptor.pre_restore_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.RestoreDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "RestoreDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._RestoreDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_restore_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restore_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.restore_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "RestoreDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SetIamPolicy(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(),
- ) -> policy_pb2.Policy:
- r"""Call the set iam policy method over HTTP.
-
- Args:
- request (~.iam_policy_pb2.SetIamPolicyRequest):
- The request object. Request message for ``SetIamPolicy`` method.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.policy_pb2.Policy:
- An Identity and Access Management (IAM) policy, which
- specifies access controls for Google Cloud resources.
-
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members``, or
- principals, to a single ``role``. Principals can be user
- accounts, service accounts, Google groups, and domains
- (such as G Suite). A ``role`` is a named list of
- permissions; each ``role`` can be an IAM predefined role
- or a user-created custom role.
-
- For some types of Google Cloud resources, a ``binding``
- can also specify a ``condition``, which is a logical
- expression that allows access to a resource only if the
- expression evaluates to ``true``. A condition can add
- constraints based on attributes of the request, the
- resource, or both. To learn which resources support
- conditions in their IAM policies, see the `IAM
- documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
-
- **JSON example:**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": [
- "user:eve@example.com"
- ],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ],
- "etag": "BwWWja0YfJA=",
- "version": 3
- }
-
- **YAML example:**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
- etag: BwWWja0YfJA=
- version: 3
-
- For a description of IAM and its features, see the `IAM
- documentation <https://cloud.google.com/iam/docs/>`__.
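
Because the policy's `etag` (described above) guards concurrent writes, SetIamPolicy is normally used in a read-modify-write loop: fetch the current policy, mutate the bindings, and send the same object (etag included) back. A hedged sketch against a placeholder database resource::

    from google.iam.v1 import iam_policy_pb2

    resource = "projects/p/instances/i/databases/d"  # placeholder
    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )
    policy.bindings.add(
        role="roles/spanner.databaseReader",
        members=["user:eve@example.com"],
    )
    # The fetched etag rides along, so a concurrent modification since the
    # read makes this write fail instead of clobbering it.
    client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )
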
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.SetIamPolicy", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_set_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.set_iam_policy", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TestIamPermissions(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. - - Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.TestIamPermissions", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
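
TestIamPermissions returns the subset of the supplied permission strings that the caller actually holds, which makes it a cheap preflight check. A sketch with a placeholder resource and assumed permission names::

    from google.iam.v1 import iam_policy_pb2

    response = client.test_iam_permissions(
        request=iam_policy_pb2.TestIamPermissionsRequest(
            resource="projects/p/instances/i/databases/d",  # placeholder
            permissions=["spanner.databases.get", "spanner.databases.drop"],
        )
    )
    can_drop = "spanner.databases.drop" in response.permissions
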
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_test_iam_permissions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.test_iam_permissions", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBackup(_BaseDatabaseAdminRestTransport._BaseUpdateBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup.UpdateBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gsad_backup.Backup: - r"""Call the update backup method over HTTP. - - Args: - request (~.gsad_backup.UpdateBackupRequest): - The request object. The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gsad_backup.Backup: - A backup of a Cloud Spanner database. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_http_options() - - request, metadata = self._interceptor.pre_update_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gsad_backup.Backup() - pb_resp = gsad_backup.Backup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gsad_backup.Backup.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup_schedule.UpdateBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Call the update backup schedule method over HTTP. - - Args: - request (~.gsad_backup_schedule.UpdateBackupScheduleRequest): - The request object. The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gsad_backup_schedule.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_update_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
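# --- Editor's note ---------------------------------------------------------
# The response handling just below unwraps the proto-plus BackupSchedule with
# .pb() so json_format.Parse can fill the underlying protobuf in place;
# ignore_unknown_fields=True keeps newer server-side fields from breaking
# older clients. A self-contained sketch; the resource name is illustrative:

from google.protobuf import json_format
from google.cloud.spanner_admin_database_v1.types import backup_schedule

resp = backup_schedule.BackupSchedule()
json_format.Parse(
    '{"name": "projects/demo/instances/test/databases/db/backupSchedules/s1"}',
    backup_schedule.BackupSchedule.pb(resp),
    ignore_unknown_fields=True,
)
assert resp.name.endswith("backupSchedules/s1")
# ---------------------------------------------------------------------------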
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gsad_backup_schedule.BackupSchedule() - pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_schedule_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gsad_backup_schedule.BackupSchedule.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup_schedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDatabase(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.UpdateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update database method over HTTP. - - Args: - request (~.spanner_database_admin.UpdateDatabaseRequest): - The request object. The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call.
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_update_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateDatabaseDdl") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.UpdateDatabaseDdlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update database ddl method over HTTP. - - Args: - request (~.spanner_database_admin.UpdateDatabaseDdlRequest): - The request object. Enqueues the given DDL statements to be applied, in - order but not necessarily all at once, to the database - schema at some point (or points) in the future. The - server checks that the statements are executable - (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - ``NULL`` value in a column to which ``NOT NULL`` would - be added). If a statement fails, all subsequent - statements in the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_http_options() - - request, metadata = self._interceptor.pre_update_database_ddl(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabaseDdl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabaseDdl", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
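# --- Editor's note ---------------------------------------------------------
# Unlike the proto-plus responses above, UpdateDatabase and UpdateDatabaseDdl
# return google.longrunning Operation messages, which are plain protobufs, so
# the code below parses them directly without the .pb() unwrapping. A sketch
# of that direct parse; the operation name is illustrative:

from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation()
json_format.Parse(
    '{"name": "projects/demo/instances/test/databases/db/operations/op1", '
    '"done": false}',
    op,
    ignore_unknown_fields=True,
)
assert not op.done
# ---------------------------------------------------------------------------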
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_database_ddl(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_database_ddl_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database_ddl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabaseDdl", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - spanner_database_admin.AddSplitPointsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AddSplitPoints(self._session, self._host, self._interceptor) # type: ignore - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CopyBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DropDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - backup.Backup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - backup_schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - spanner_database_admin.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - spanner_database_admin.GetDatabaseDdlResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - spanner_database_admin.InternalUpdateGraphOperationResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._InternalUpdateGraphOperation(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - backup.ListBackupOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - backup.ListBackupsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - backup_schedule.ListBackupSchedulesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - spanner_database_admin.ListDatabaseOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabaseOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - spanner_database_admin.ListDatabaseRolesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabaseRoles(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - spanner_database_admin.ListDatabasesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestoreDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - gsad_backup.Backup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseDatabaseAdminRestTransport._BaseCancelOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CancelOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseDatabaseAdminRestTransport._BaseDeleteOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseDatabaseAdminRestTransport._BaseGetOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminAsyncClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseDatabaseAdminRestTransport._BaseListOperations, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. 
- - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListOperations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
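# --- Editor's note ---------------------------------------------------------
# All request/response logging in these stubs is gated on
# CLIENT_LOGGING_SUPPORTED and on the module logger being enabled for DEBUG,
# so it is silent by default. One way to surface it with the standard logging
# module (handler configuration is left to the application):

import logging

logging.basicConfig(level=logging.DEBUG)
logging.getLogger("google.cloud.spanner_admin_database_v1").setLevel(
    logging.DEBUG)
# ---------------------------------------------------------------------------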
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.ListOperations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DatabaseAdminRestTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py deleted file mode 100644 index 0cfaa08199..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py +++ /dev/null @@ -1,1378 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseDatabaseAdminRestTransport(DatabaseAdminTransport): - """Base REST backend transport for DatabaseAdmin. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it.
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseAddSplitPoints: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.AddSplitPointsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCopyBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/backups:copy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.CopyBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCopyBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "backupId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/backups', - 'body': 'backup', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gsad_backup.CreateBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "backupScheduleId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', - 'body': 'backup_schedule', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gsad_backup_schedule.CreateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - 
transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/databases', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.CreateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.DeleteBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': 
'/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup_schedule.DeleteBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDropDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.DropDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseDropDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.GetBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', 
- 'uri': '/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup_schedule.GetBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.GetDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDatabaseDdl: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.GetDatabaseDdlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, 
str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:getIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:getIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseInternalUpdateGraphOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseListBackupOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/backupOperations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.ListBackupOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackups: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/backups', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.ListBackupsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackups._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackupSchedules: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup_schedule.ListBackupSchedulesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabaseOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/databaseOperations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.ListDatabaseOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabaseRoles: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.ListDatabaseRolesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabases: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/databases', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.ListDatabasesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabases._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRestoreDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/databases:restore', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.RestoreDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': 
'/v1/{resource=projects/*/instances/*/backups/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/databaseRoles/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{backup.name=projects/*/instances/*/backups/*}', - 'body': 'backup', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = 
gsad_backup.UpdateBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}', - 'body': 'backup_schedule', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{database.name=projects/*/instances/*/databases/*}', - 'body': 'database', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.UpdateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDatabaseDdl: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.UpdateDatabaseDdlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - 
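# Illustrative sketch of how the _Base* helpers above compose (hedged:
# placeholder resource names; assumes google-api-core's path_template and
# the published google.cloud.spanner_admin_database_v1 types). API-method
# classes pass the protobuf message itself to transcode(); the
# operations-mixin classes, like the surrounding one, flatten the request
# into kwargs first.
import json

from google.api_core import path_template
from google.longrunning import operations_pb2
from google.protobuf import json_format

from google.cloud.spanner_admin_database_v1.types import GetBackupRequest

# Message style, mirroring _BaseGetBackup: match the request against an
# HTTP rule, expand the URI template, then JSON-encode the leftovers as
# query parameters.
http_options = [{'method': 'get',
                 'uri': '/v1/{name=projects/*/instances/*/backups/*}'}]
request = GetBackupRequest(
    name='projects/my-project/instances/my-instance/backups/my-backup')
transcoded = path_template.transcode(http_options, GetBackupRequest.pb(request))
query_params = json.loads(json_format.MessageToJson(
    transcoded['query_params'], use_integers_for_enums=True))
query_params['$alt'] = 'json;enum-encoding=int'  # request JSON with int enums

# Kwargs style, mirroring the operations mixins:
op_options = [{'method': 'delete',
               'uri': '/v1/{name=projects/*/instances/*/operations/*}'}]
op_request = operations_pb2.DeleteOperationRequest(
    name='projects/my-project/instances/my-instance/operations/op-123')
op_transcoded = path_template.transcode(
    op_options, **json_format.MessageToDict(op_request))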
@staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseDatabaseAdminRestTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py deleted file mode 100644 index 84cb902d6c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py +++ /dev/null @@ -1,148 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .backup import ( - Backup, - BackupInfo, - BackupInstancePartition, - CopyBackupEncryptionConfig, - CopyBackupMetadata, - CopyBackupRequest, - CreateBackupEncryptionConfig, - CreateBackupMetadata, - CreateBackupRequest, - DeleteBackupRequest, - FullBackupSpec, - GetBackupRequest, - IncrementalBackupSpec, - ListBackupOperationsRequest, - ListBackupOperationsResponse, - ListBackupsRequest, - ListBackupsResponse, - UpdateBackupRequest, -) -from .backup_schedule import ( - BackupSchedule, - BackupScheduleSpec, - CreateBackupScheduleRequest, - CrontabSpec, - DeleteBackupScheduleRequest, - GetBackupScheduleRequest, - ListBackupSchedulesRequest, - ListBackupSchedulesResponse, - UpdateBackupScheduleRequest, -) -from .common import ( - EncryptionConfig, - EncryptionInfo, - OperationProgress, - DatabaseDialect, -) -from .spanner_database_admin import ( - AddSplitPointsRequest, - AddSplitPointsResponse, - CreateDatabaseMetadata, - CreateDatabaseRequest, - Database, - DatabaseRole, - DdlStatementActionInfo, - DropDatabaseRequest, - GetDatabaseDdlRequest, - GetDatabaseDdlResponse, - GetDatabaseRequest, - InternalUpdateGraphOperationRequest, - InternalUpdateGraphOperationResponse, - ListDatabaseOperationsRequest, - ListDatabaseOperationsResponse, - ListDatabaseRolesRequest, - ListDatabaseRolesResponse, - ListDatabasesRequest, - ListDatabasesResponse, - OptimizeRestoredDatabaseMetadata, - RestoreDatabaseEncryptionConfig, - RestoreDatabaseMetadata, - RestoreDatabaseRequest, - RestoreInfo, - SplitPoints, - UpdateDatabaseDdlMetadata, - UpdateDatabaseDdlRequest, - UpdateDatabaseMetadata, - UpdateDatabaseRequest, - RestoreSourceType, -) - -__all__ = ( - 'Backup', - 'BackupInfo', - 'BackupInstancePartition', - 'CopyBackupEncryptionConfig', - 'CopyBackupMetadata', - 'CopyBackupRequest', - 'CreateBackupEncryptionConfig', - 'CreateBackupMetadata', - 'CreateBackupRequest', - 'DeleteBackupRequest', - 'FullBackupSpec', - 'GetBackupRequest', - 'IncrementalBackupSpec', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'UpdateBackupRequest', - 'BackupSchedule', - 'BackupScheduleSpec', - 'CreateBackupScheduleRequest', - 'CrontabSpec', - 'DeleteBackupScheduleRequest', - 'GetBackupScheduleRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'UpdateBackupScheduleRequest', - 'EncryptionConfig', - 'EncryptionInfo', - 'OperationProgress', - 'DatabaseDialect', - 'AddSplitPointsRequest', - 'AddSplitPointsResponse', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'Database', - 'DatabaseRole', - 'DdlStatementActionInfo', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'GetDatabaseRequest', - 'InternalUpdateGraphOperationRequest', - 'InternalUpdateGraphOperationResponse', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'ListDatabaseRolesRequest', - 'ListDatabaseRolesResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'OptimizeRestoredDatabaseMetadata', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'RestoreDatabaseRequest', - 'RestoreInfo', - 'SplitPoints', - 'UpdateDatabaseDdlMetadata', - 'UpdateDatabaseDdlRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseRequest', - 'RestoreSourceType', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py deleted file mode 100644 
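Taken together, the re-exports above mean callers can import every admin message straight from the versioned types package. A minimal hedged sketch (placeholder project, instance, and database IDs):

    from google.protobuf import timestamp_pb2

    from google.cloud.spanner_admin_database_v1.types import Backup, CreateBackupRequest

    # Backups require an expiration time; build one from an RFC 3339 string.
    expire_time = timestamp_pb2.Timestamp()
    expire_time.FromJsonString('2025-12-31T00:00:00Z')

    request = CreateBackupRequest(
        parent='projects/my-project/instances/my-instance',  # placeholder
        backup_id='my-backup',                               # placeholder
        backup=Backup(
            database='projects/my-project/instances/my-instance/databases/my-db',
            expire_time=expire_time,
        ),
    )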
index d89cfa44b5..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py +++ /dev/null @@ -1,1097 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import common -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'Backup', - 'CreateBackupRequest', - 'CreateBackupMetadata', - 'CopyBackupRequest', - 'CopyBackupMetadata', - 'UpdateBackupRequest', - 'GetBackupRequest', - 'DeleteBackupRequest', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'BackupInfo', - 'CreateBackupEncryptionConfig', - 'CopyBackupEncryptionConfig', - 'FullBackupSpec', - 'IncrementalBackupSpec', - 'BackupInstancePartition', - }, -) - - -class Backup(proto.Message): - r"""A backup of a Cloud Spanner database. - - Attributes: - database (str): - Required for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. Name of the database from which this backup was - created. This needs to be in the same instance as the - backup. Values are of the form - ``projects/<project>/instances/<instance>/databases/<database>``. - version_time (google.protobuf.timestamp_pb2.Timestamp): - The backup will contain an externally consistent copy of the - database at the timestamp specified by ``version_time``. If - ``version_time`` is not specified, the system will set - ``version_time`` to the ``create_time`` of the backup. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Required for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. The expiration time of the backup, with - microseconds granularity that must be at least 6 hours and - at most 366 days from the time the CreateBackup request is - processed. Once the ``expire_time`` has passed, the backup - is eligible to be automatically deleted by Cloud Spanner to - free the resources used by the backup. - name (str): - Output only for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. Required for the - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup] - operation. - - A globally unique identifier for the backup which cannot be - changed. Values are of the form - ``projects/<project>/instances/<instance>/backups/[a-z][a-z0-9_\-]*[a-z0-9]`` - The final segment of the name must be between 2 and 60 - characters in length.
- - The backup is stored in the location(s) specified in the - instance configuration of the instance containing the - backup, identified by the prefix of the backup name of the - form ``projects/<project>/instances/<instance>``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request is received. If the request does not specify - ``version_time``, the ``version_time`` of the backup will be - equivalent to the ``create_time``. - size_bytes (int): - Output only. Size of the backup in bytes. - freeable_size_bytes (int): - Output only. The number of bytes that will be - freed by deleting this backup. This value will - be zero if, for example, this backup is part of - an incremental backup chain and younger backups - in the chain require that we keep its data. For - backups not in an incremental backup chain, this - is always the size of the backup. This value may - change if backups on the same chain get created, - deleted or expired. - exclusive_size_bytes (int): - Output only. For a backup in an incremental - backup chain, this is the storage space needed - to keep the data that has changed since the - previous backup. For all other backups, this is - always the size of the backup. This value may - change if backups on the same chain get deleted - or expired. - - This field can be used to calculate the total - storage space used by a set of backups. For - example, the total space used by all backups of - a database can be computed by summing up this - field. - state (google.cloud.spanner_admin_database_v1.types.Backup.State): - Output only. The current state of the backup. - referencing_databases (MutableSequence[str]): - Output only. The names of the restored databases that - reference the backup. The database names are of the form - ``projects/<project>/instances/<instance>/databases/<database>``. - Referencing databases may exist in different instances. The - existence of any referencing database prevents the backup - from being deleted. When a restored database from the backup - enters the ``READY`` state, the reference to the backup is - removed. - encryption_info (google.cloud.spanner_admin_database_v1.types.EncryptionInfo): - Output only. The encryption information for - the backup. - encryption_information (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): - Output only. The encryption information for the backup, - whether it is protected by one or more KMS keys. The - information includes all Cloud KMS key versions used to - encrypt the backup. The ``encryption_status`` field inside of - each ``EncryptionInfo`` - is not populated. At least one of the key versions must be - available for the backup to be restored. If a key version is - revoked in the middle of a restore, the restore behavior is - undefined. - database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): - Output only. The database dialect information - for the backup. - referencing_backups (MutableSequence[str]): - Output only. The names of the destination backups being - created by copying this source backup. The backup names are - of the form - ``projects/<project>/instances/<instance>/backups/<backup>``. - Referencing backups may exist in different instances. The - existence of any referencing backup prevents the backup from - being deleted. When the copy operation is done (either - successfully completed or cancelled or the destination - backup is deleted), the reference to the backup is removed.
- max_expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The max allowed expiration time of the backup, - with microseconds granularity. A backup's expiration time - can be configured in multiple APIs: CreateBackup, - UpdateBackup, CopyBackup. When updating or copying an - existing backup, the expiration time specified must be less - than ``Backup.max_expire_time``. - backup_schedules (MutableSequence[str]): - Output only. List of backup schedule URIs - that are associated with creating this backup. - This is only applicable for scheduled backups, - and is empty for on-demand backups. - - To optimize for storage, whenever possible, - multiple schedules are collapsed together to - create one backup. In such cases, this field - captures the list of all backup schedule URIs - that are associated with creating this backup. - If collapsing is not done, then this field - captures the single backup schedule URI - associated with creating this backup. - incremental_backup_chain_id (str): - Output only. Populated only for backups in an incremental - backup chain. Backups share the same chain id if and only if - they belong to the same incremental backup chain. Use this - field to determine which backups are part of the same - incremental backup chain. The ordering of backups in the - chain can be determined by ordering the backup - ``version_time``. - oldest_version_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Data deleted at a time older - than this is guaranteed not to be retained in - order to support this backup. For a backup in an - incremental backup chain, this is the version - time of the oldest backup that exists or ever - existed in the chain. For all other backups, - this is the version time of the backup. This - field can be used to understand what data is - being retained by the backup system. - instance_partitions (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupInstancePartition]): - Output only. The instance partition(s) storing the backup. - - This is the same as the list of the instance partition(s) - that the database had footprint in at the backup's - ``version_time``. - """ - class State(proto.Enum): - r"""Indicates the current state of the backup. - - Values: - STATE_UNSPECIFIED (0): - Not specified. - CREATING (1): - The pending backup is still being created. Operations on the - backup may fail with ``FAILED_PRECONDITION`` in this state. - READY (2): - The backup is complete and ready for use. 
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - database: str = proto.Field( - proto.STRING, - number=2, - ) - version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - size_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - freeable_size_bytes: int = proto.Field( - proto.INT64, - number=15, - ) - exclusive_size_bytes: int = proto.Field( - proto.INT64, - number=16, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - referencing_databases: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - encryption_info: common.EncryptionInfo = proto.Field( - proto.MESSAGE, - number=8, - message=common.EncryptionInfo, - ) - encryption_information: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( - proto.MESSAGE, - number=13, - message=common.EncryptionInfo, - ) - database_dialect: common.DatabaseDialect = proto.Field( - proto.ENUM, - number=10, - enum=common.DatabaseDialect, - ) - referencing_backups: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=11, - ) - max_expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - backup_schedules: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=14, - ) - incremental_backup_chain_id: str = proto.Field( - proto.STRING, - number=17, - ) - oldest_version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=18, - message=timestamp_pb2.Timestamp, - ) - instance_partitions: MutableSequence['BackupInstancePartition'] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message='BackupInstancePartition', - ) - - -class CreateBackupRequest(proto.Message): - r"""The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - - Attributes: - parent (str): - Required. The name of the instance in which the backup will - be created. This must be the same instance that contains the - database the backup will be created from. The backup will be - stored in the location(s) specified in the instance - configuration of this instance. Values are of the form - ``projects//instances/``. - backup_id (str): - Required. The id of the backup to be created. The - ``backup_id`` appended to ``parent`` forms the full backup - name of the form - ``projects//instances//backups/``. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to create. - encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): - Optional. The encryption configuration used to encrypt the - backup. If this field is not specified, the backup will use - the same encryption configuration as the database by - default, namely - [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] - = ``USE_DATABASE_ENCRYPTION``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup: 'Backup' = proto.Field( - proto.MESSAGE, - number=3, - message='Backup', - ) - encryption_config: 'CreateBackupEncryptionConfig' = proto.Field( - proto.MESSAGE, - number=4, - message='CreateBackupEncryptionConfig', - ) - - -class CreateBackupMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - - Attributes: - name (str): - The name of the backup being created. - database (str): - The name of the database the backup is - created from. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of this operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - database: str = proto.Field( - proto.STRING, - number=2, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=3, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class CopyBackupRequest(proto.Message): - r"""The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - - Attributes: - parent (str): - Required. The name of the destination instance that will - contain the backup copy. Values are of the form: - ``projects//instances/``. - backup_id (str): - Required. The id of the backup copy. The ``backup_id`` - appended to ``parent`` forms the full backup_uri of the form - ``projects//instances//backups/``. - source_backup (str): - Required. The source backup to be copied. The source backup - needs to be in READY state for it to be copied. Once - CopyBackup is in progress, the source backup cannot be - deleted or cleaned up on expiration until CopyBackup is - finished. Values are of the form: - ``projects//instances//backups/``. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The expiration time of the backup in microsecond - granularity. The expiration time must be at least 6 hours - and at most 366 days from the ``create_time`` of the source - backup. Once the ``expire_time`` has passed, the backup is - eligible to be automatically deleted by Cloud Spanner to - free the resources used by the backup. - encryption_config (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig): - Optional. The encryption configuration used to encrypt the - backup. 
If this field is not specified, the backup will use - the same encryption configuration as the source backup by - default, namely - [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] - = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_id: str = proto.Field( - proto.STRING, - number=2, - ) - source_backup: str = proto.Field( - proto.STRING, - number=3, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - encryption_config: 'CopyBackupEncryptionConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='CopyBackupEncryptionConfig', - ) - - -class CopyBackupMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - - Attributes: - name (str): - The name of the backup being created through the copy - operation. Values are of the form - ``projects//instances//backups/``. - source_backup (str): - The name of the source backup that is being copied. Values - are of the form - ``projects//instances//backups/``. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of CopyBackup operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - source_backup: str = proto.Field( - proto.STRING, - number=2, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=3, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateBackupRequest(proto.Message): - r"""The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - - Attributes: - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only supported - for the following fields: - - - ``backup.expire_time``. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be updated. - This mask is relative to the Backup resource, not to the - request message. The field mask must always be specified; - this prevents any future fields from being erased - accidentally by clients that do not know about them. 
- """ - - backup: 'Backup' = proto.Field( - proto.MESSAGE, - number=1, - message='Backup', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetBackupRequest(proto.Message): - r"""The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - - Attributes: - name (str): - Required. Name of the backup. Values are of the form - ``projects//instances//backups/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBackupRequest(proto.Message): - r"""The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - - Attributes: - name (str): - Required. Name of the backup to delete. Values are of the - form - ``projects//instances//backups/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupsRequest(proto.Message): - r"""The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Attributes: - parent (str): - Required. The instance to list backups from. Values are of - the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned backups. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [Backup][google.spanner.admin.database.v1.Backup] are - eligible for filtering: - - - ``name`` - - ``database`` - - ``state`` - - ``create_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``expire_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``version_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``size_bytes`` - - ``backup_schedules`` - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic, but you can specify AND, OR, and - NOT logic explicitly. - - Here are a few examples: - - - ``name:Howl`` - The backup's name contains the string - "howl". - - ``database:prod`` - The database's name contains the - string "prod". - - ``state:CREATING`` - The backup is pending creation. - - ``state:READY`` - The backup is fully created and ready - for use. - - ``(name:howl) AND (create_time < \"2018-03-28T14:50:00Z\")`` - - The backup name contains the string "howl" and - ``create_time`` of the backup is before - 2018-03-28T14:50:00Z. - - ``expire_time < \"2018-03-28T14:50:00Z\"`` - The backup - ``expire_time`` is before 2018-03-28T14:50:00Z. - - ``size_bytes > 10000000000`` - The backup's size is - greater than 10GB - - ``backup_schedules:daily`` - The backup is created from a - schedule with "daily" in its name. - page_size (int): - Number of backups to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupsResponse.next_page_token] - from a previous - [ListBackupsResponse][google.spanner.admin.database.v1.ListBackupsResponse] - to the same ``parent`` and with the same ``filter``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupsResponse(proto.Message): - r"""The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Attributes: - backups (MutableSequence[google.cloud.spanner_admin_database_v1.types.Backup]): - The list of matching backups. Backups returned are ordered - by ``create_time`` in descending order, starting from the - most recent ``create_time``. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups] - call to fetch more of the matching backups. - """ - - @property - def raw_page(self): - return self - - backups: MutableSequence['Backup'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Backup', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBackupOperationsRequest(proto.Message): - r"""The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Attributes: - parent (str): - Required. The instance of the backup operations. Values are - of the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned backup - operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [operation][google.longrunning.Operation] are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] - is - ``type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first if filtering on - metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic, but you can specify AND, OR, and - NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` - ``metadata.database:prod`` - Returns operations where: - - - The operation's metadata type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - - The source database name of backup contains the string - "prod". 
- - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` - ``(metadata.name:howl) AND`` - ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Returns operations where: - - - The operation's metadata type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - - The backup name contains the string "howl". - - The operation started before 2018-03-28T14:50:00Z. - - The operation resulted in an error. - - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` - ``(metadata.source_backup:test) AND`` - ``(metadata.progress.start_time < \"2022-01-18T14:50:00Z\") AND`` - ``(error:*)`` - Returns operations where: - - - The operation's metadata type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - - The source backup name contains the string "test". - - The operation started before 2022-01-18T14:50:00Z. - - The operation resulted in an error. - - - ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` - ``(metadata.database:test_db)) OR`` - ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` - ``(metadata.source_backup:test_bkp)) AND`` - ``(error:*)`` - Returns operations where: - - - The operation's metadata matches either of criteria: - - - The operation's metadata type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] - AND the source database name of the backup contains - the string "test_db" - - The operation's metadata type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata] - AND the source backup name contains the string - "test_bkp" - - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupOperationsResponse.next_page_token] - from a previous - [ListBackupOperationsResponse][google.spanner.admin.database.v1.ListBackupOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupOperationsResponse(proto.Message): - r"""The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Attributes: - operations (MutableSequence[google.longrunning.operations_pb2.Operation]): - The list of matching backup [long-running - operations][google.longrunning.Operation]. Each operation's - name will be prefixed by the backup's name. The operation's - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that are pending or have - completed/failed/canceled within the last 7 days. Operations - returned are ordered by - ``operation.metadata.value.progress.start_time`` in - descending order starting from the most recently started - operation. 
- next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class BackupInfo(proto.Message): - r"""Information about a backup. - - Attributes: - backup (str): - Name of the backup. - version_time (google.protobuf.timestamp_pb2.Timestamp): - The backup contains an externally consistent copy of - ``source_database`` at the timestamp specified by - ``version_time``. If the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request did not specify ``version_time``, the - ``version_time`` of the backup is equivalent to the - ``create_time``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request was received. - source_database (str): - Name of the database the backup was created - from. - """ - - backup: str = proto.Field( - proto.STRING, - number=1, - ) - version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - source_database: str = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateBackupEncryptionConfig(proto.Message): - r"""Encryption configuration for the backup to create. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig.EncryptionType): - Required. The encryption type of the backup. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to protect the - backup. This field should be set only when - [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - kms_key_names (MutableSequence[str]): - Optional. Specifies the KMS configuration for the one or - more keys used to protect the backup. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - - The keys referenced by kms_key_names must fully cover all - regions of the backup's instance configuration. Some - examples: - - - For single region instance configs, specify a single - regional location KMS key. - - For multi-regional instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For an instance config of type USER_MANAGED, please - specify only regional location KMS keys to cover each - region in the instance config. Multi-regional location KMS - keys are not supported for USER_MANAGED instance configs. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the backup. - - Values: - ENCRYPTION_TYPE_UNSPECIFIED (0): - Unspecified. Do not use. - USE_DATABASE_ENCRYPTION (1): - Use the same encryption configuration as the database. This - is the default option when - [encryption_config][google.spanner.admin.database.v1.CreateBackupEncryptionConfig] - is empty. 
For example, if the database is using - ``Customer_Managed_Encryption``, the backup will be using - the same Cloud KMS key as the database. - GOOGLE_DEFAULT_ENCRYPTION (2): - Use Google default encryption. - CUSTOMER_MANAGED_ENCRYPTION (3): - Use customer managed encryption. If specified, - ``kms_key_name`` must contain a valid Cloud KMS key. - """ - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_DATABASE_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type: EncryptionType = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CopyBackupEncryptionConfig(proto.Message): - r"""Encryption configuration for the copied backup. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig.EncryptionType): - Required. The encryption type of the backup. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to protect the - backup. This field should be set only when - [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - kms_key_names (MutableSequence[str]): - Optional. Specifies the KMS configuration for the one or - more keys used to protect the backup. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - KMS keys can be specified in any order. - - The keys referenced by kms_key_names must fully cover all - regions of the backup's instance configuration. Some - examples: - - - For single region instance configs, specify a single - regional location KMS key. - - For multi-regional instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For an instance config of type USER_MANAGED, please - specify only regional location KMS keys to cover each - region in the instance config. Multi-regional location KMS - keys are not supported for USER_MANAGED instance configs. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the backup. - - Values: - ENCRYPTION_TYPE_UNSPECIFIED (0): - Unspecified. Do not use. - USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1): - This is the default option for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] - when - [encryption_config][google.spanner.admin.database.v1.CopyBackupEncryptionConfig] - is not specified. For example, if the source backup is using - ``Customer_Managed_Encryption``, the backup will be using - the same Cloud KMS key as the source backup. - GOOGLE_DEFAULT_ENCRYPTION (2): - Use Google default encryption. - CUSTOMER_MANAGED_ENCRYPTION (3): - Use customer managed encryption. If specified, either - ``kms_key_name`` or ``kms_key_names`` must contain valid - Cloud KMS key(s).
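- 
-             A hedged sketch of a customer-managed-key copy (all resource
-             names and the KMS key below are illustrative)::
- 
-                 from google.cloud.spanner_admin_database_v1 import DatabaseAdminClient
- 
-                 client = DatabaseAdminClient()
-                 op = client.copy_backup(request={
-                     "parent": "projects/p/instances/i",
-                     "backup_id": "b-copy",
-                     "source_backup": "projects/p/instances/i/backups/b",
-                     "expire_time": {"seconds": 1893456000},
-                     "encryption_config": {
-                         "encryption_type": 3,  # CUSTOMER_MANAGED_ENCRYPTION
-                         "kms_key_name": "projects/p/locations/us-central1/keyRings/r/cryptoKeys/k",
-                     },
-                 })
-                 backup = op.result()  # CopyBackup is a long-running operation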
- """ - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type: EncryptionType = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class FullBackupSpec(proto.Message): - r"""The specification for full backups. - A full backup stores the entire contents of the database at a - given version time. - - """ - - -class IncrementalBackupSpec(proto.Message): - r"""The specification for incremental backup chains. - An incremental backup stores the delta of changes between a - previous backup and the database contents at a given version - time. An incremental backup chain consists of a full backup and - zero or more successive incremental backups. The first backup - created for an incremental backup chain is always a full backup. - - """ - - -class BackupInstancePartition(proto.Message): - r"""Instance partition information for the backup. - - Attributes: - instance_partition (str): - A unique identifier for the instance partition. Values are - of the form - ``projects//instances//instancePartitions/`` - """ - - instance_partition: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py deleted file mode 100644 index b69e3b08ee..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py +++ /dev/null @@ -1,369 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'BackupScheduleSpec', - 'BackupSchedule', - 'CrontabSpec', - 'CreateBackupScheduleRequest', - 'GetBackupScheduleRequest', - 'DeleteBackupScheduleRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'UpdateBackupScheduleRequest', - }, -) - - -class BackupScheduleSpec(proto.Message): - r"""Defines specifications of the backup schedule. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - cron_spec (google.cloud.spanner_admin_database_v1.types.CrontabSpec): - Cron style schedule specification. 
- - This field is a member of `oneof`_ ``schedule_spec``. - """ - - cron_spec: 'CrontabSpec' = proto.Field( - proto.MESSAGE, - number=1, - oneof='schedule_spec', - message='CrontabSpec', - ) - - -class BackupSchedule(proto.Message): - r"""BackupSchedule expresses the automated backup creation - specification for a Spanner database. - Next ID: 10 - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Identifier. Output only for the - [CreateBackupSchedule][DatabaseAdmin.CreateBackupSchedule] - operation. Required for the - [UpdateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule] - operation. A globally unique identifier for the backup - schedule which cannot be changed. Values are of the form - ``projects//instances//databases//backupSchedules/[a-z][a-z0-9_\-]*[a-z0-9]`` - The final segment of the name must be between 2 and 60 - characters in length. - spec (google.cloud.spanner_admin_database_v1.types.BackupScheduleSpec): - Optional. The schedule specification based on - which the backup creations are triggered. - retention_duration (google.protobuf.duration_pb2.Duration): - Optional. The retention duration of a backup - that must be at least 6 hours and at most 366 - days. The backup is eligible to be automatically - deleted once the retention period has elapsed. - encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): - Optional. The encryption configuration that - will be used to encrypt the backup. If this - field is not specified, the backup will use the - same encryption configuration as the database. - full_backup_spec (google.cloud.spanner_admin_database_v1.types.FullBackupSpec): - The schedule creates only full backups. - - This field is a member of `oneof`_ ``backup_type_spec``. - incremental_backup_spec (google.cloud.spanner_admin_database_v1.types.IncrementalBackupSpec): - The schedule creates incremental backup - chains. - - This field is a member of `oneof`_ ``backup_type_spec``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which the - schedule was last updated. If the schedule has - never been updated, this field contains the - timestamp when the schedule was first created.
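- 
-         As an illustrative sketch (the cron text and retention below are
-         example values), a "daily full backup, kept 30 days" schedule
-         could be expressed as::
- 
-             from google.cloud import spanner_admin_database_v1 as sad
- 
-             schedule = sad.BackupSchedule(
-                 spec=sad.BackupScheduleSpec(
-                     cron_spec=sad.CrontabSpec(text="0 2 * * *")),
-                 retention_duration={"seconds": 30 * 24 * 3600},
-                 full_backup_spec=sad.FullBackupSpec(),  # oneof: full backups
-             )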
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - spec: 'BackupScheduleSpec' = proto.Field( - proto.MESSAGE, - number=6, - message='BackupScheduleSpec', - ) - retention_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - encryption_config: backup.CreateBackupEncryptionConfig = proto.Field( - proto.MESSAGE, - number=4, - message=backup.CreateBackupEncryptionConfig, - ) - full_backup_spec: backup.FullBackupSpec = proto.Field( - proto.MESSAGE, - number=7, - oneof='backup_type_spec', - message=backup.FullBackupSpec, - ) - incremental_backup_spec: backup.IncrementalBackupSpec = proto.Field( - proto.MESSAGE, - number=8, - oneof='backup_type_spec', - message=backup.IncrementalBackupSpec, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - - -class CrontabSpec(proto.Message): - r"""CrontabSpec can be used to specify the version time and - frequency at which the backup should be created. - - Attributes: - text (str): - Required. Textual representation of the crontab. User can - customize the backup frequency and the backup version time - using the cron expression. The version time must be in UTC - timezone. - - The backup will contain an externally consistent copy of the - database at the version time. Allowed frequencies are 12 - hour, 1 day, 1 week and 1 month. Examples of valid cron - specifications: - - - ``0 2/12 * * *`` : every 12 hours at (2, 14) hours past - midnight in UTC. - - ``0 2,14 * * *`` : every 12 hours at (2,14) hours past - midnight in UTC. - - ``0 2 * * *`` : once a day at 2 past midnight in UTC. - - ``0 2 * * 0`` : once a week every Sunday at 2 past - midnight in UTC. - - ``0 2 8 * *`` : once a month on 8th day at 2 past midnight - in UTC. - time_zone (str): - Output only. The time zone of the times in - ``CrontabSpec.text``. Currently only UTC is supported. - creation_window (google.protobuf.duration_pb2.Duration): - Output only. Schedule backups will contain an externally - consistent copy of the database at the version time - specified in ``schedule_spec.cron_spec``. However, Spanner - may not initiate the creation of the scheduled backups at - that version time. Spanner will initiate the creation of - scheduled backups within the time window bounded by the - version_time specified in ``schedule_spec.cron_spec`` and - version_time + ``creation_window``. - """ - - text: str = proto.Field( - proto.STRING, - number=1, - ) - time_zone: str = proto.Field( - proto.STRING, - number=2, - ) - creation_window: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - - -class CreateBackupScheduleRequest(proto.Message): - r"""The request for - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. - - Attributes: - parent (str): - Required. The name of the database that this - backup schedule applies to. - backup_schedule_id (str): - Required. The Id to use for the backup schedule. The - ``backup_schedule_id`` appended to ``parent`` forms the full - backup schedule name of the form - ``projects//instances//databases//backupSchedules/``. - backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): - Required. The backup schedule to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_schedule_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup_schedule: 'BackupSchedule' = proto.Field( - proto.MESSAGE, - number=3, - message='BackupSchedule', - ) - - -class GetBackupScheduleRequest(proto.Message): - r"""The request for - [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. - - Attributes: - name (str): - Required. The name of the schedule to retrieve. Values are - of the form - ``projects//instances//databases//backupSchedules/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBackupScheduleRequest(proto.Message): - r"""The request for - [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. - - Attributes: - name (str): - Required. The name of the schedule to delete. Values are of - the form - ``projects//instances//databases//backupSchedules/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupSchedulesRequest(proto.Message): - r"""The request for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - - Attributes: - parent (str): - Required. Database is the parent resource - whose backup schedules should be listed. Values - are of the form - projects//instances//databases/ - page_size (int): - Optional. Number of backup schedules to be - returned in the response. If 0 or less, defaults - to the server's maximum allowed page size. - page_token (str): - Optional. If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupSchedulesResponse.next_page_token] - from a previous - [ListBackupSchedulesResponse][google.spanner.admin.database.v1.ListBackupSchedulesResponse] - to the same ``parent``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupSchedulesResponse(proto.Message): - r"""The response for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - - Attributes: - backup_schedules (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupSchedule]): - The list of backup schedules for a database. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules] - call to fetch more of the schedules. - """ - - @property - def raw_page(self): - return self - - backup_schedules: MutableSequence['BackupSchedule'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BackupSchedule', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateBackupScheduleRequest(proto.Message): - r"""The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - - Attributes: - backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): - Required. The backup schedule to update. - ``backup_schedule.name``, and the fields to be updated as - specified by ``update_mask`` are required. Other fields are - ignored. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - the BackupSchedule resource should be updated. 
- This mask is relative to the BackupSchedule - resource, not to the request message. The field - mask must always be specified; this prevents any - future fields from being erased accidentally. - """ - - backup_schedule: 'BackupSchedule' = proto.Field( - proto.MESSAGE, - number=1, - message='BackupSchedule', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py deleted file mode 100644 index 40606ba0e5..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'DatabaseDialect', - 'OperationProgress', - 'EncryptionConfig', - 'EncryptionInfo', - }, -) - - -class DatabaseDialect(proto.Enum): - r"""Indicates the dialect type of a database. - - Values: - DATABASE_DIALECT_UNSPECIFIED (0): - Default value. This value will create a database with the - GOOGLE_STANDARD_SQL dialect. - GOOGLE_STANDARD_SQL (1): - GoogleSQL supported SQL. - POSTGRESQL (2): - PostgreSQL supported SQL. - """ - DATABASE_DIALECT_UNSPECIFIED = 0 - GOOGLE_STANDARD_SQL = 1 - POSTGRESQL = 2 - - -class OperationProgress(proto.Message): - r"""Encapsulates progress related information for a Cloud Spanner - long running operation. - - Attributes: - progress_percent (int): - Percent completion of the operation. - Values are between 0 and 100 inclusive. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time the request was received. - end_time (google.protobuf.timestamp_pb2.Timestamp): - If set, the time at which this operation - failed or was completed successfully. - """ - - progress_percent: int = proto.Field( - proto.INT32, - number=1, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class EncryptionConfig(proto.Message): - r"""Encryption configuration for a Cloud Spanner database. - - Attributes: - kms_key_name (str): - The Cloud KMS key to be used for encrypting and decrypting - the database. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. 
- kms_key_names (MutableSequence[str]): - Specifies the KMS configuration for the one or more keys - used to encrypt the database. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - - The keys referenced by kms_key_names must fully cover all - regions of the database instance configuration. Some - examples: - - - For single region database instance configs, specify a - single regional location KMS key. - - For multi-regional database instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For a database instance config of type USER_MANAGED, - please specify only regional location KMS keys to cover - each region in the instance config. Multi-regional - location KMS keys are not supported for USER_MANAGED - instance configs. - """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class EncryptionInfo(proto.Message): - r"""Encryption information for a Cloud Spanner database or - backup. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.EncryptionInfo.Type): - Output only. The type of encryption. - encryption_status (google.rpc.status_pb2.Status): - Output only. If present, the status of a - recent encrypt/decrypt call on underlying data - for this database or backup. Regardless of - status, data is always encrypted at rest. - kms_key_version (str): - Output only. A Cloud KMS key version that is - being used to protect the database or backup. - """ - class Type(proto.Enum): - r"""Possible encryption types. - - Values: - TYPE_UNSPECIFIED (0): - Encryption type was not specified, though - data at rest remains encrypted. - GOOGLE_DEFAULT_ENCRYPTION (1): - The data is encrypted at rest with a key that - is fully managed by Google. No key version or - status will be populated. This is the default - state. - CUSTOMER_MANAGED_ENCRYPTION (2): - The data is encrypted at rest with a key that is managed by - the customer. The active version of the key. - ``kms_key_version`` will be populated, and - ``encryption_status`` may be populated. - """ - TYPE_UNSPECIFIED = 0 - GOOGLE_DEFAULT_ENCRYPTION = 1 - CUSTOMER_MANAGED_ENCRYPTION = 2 - - encryption_type: Type = proto.Field( - proto.ENUM, - number=3, - enum=Type, - ) - encryption_status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=4, - message=status_pb2.Status, - ) - kms_key_version: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py deleted file mode 100644 index 3d882c8443..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py +++ /dev/null @@ -1,1348 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import common -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'RestoreSourceType', - 'RestoreInfo', - 'Database', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'CreateDatabaseRequest', - 'CreateDatabaseMetadata', - 'GetDatabaseRequest', - 'UpdateDatabaseRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseDdlRequest', - 'DdlStatementActionInfo', - 'UpdateDatabaseDdlMetadata', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'RestoreDatabaseRequest', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'OptimizeRestoredDatabaseMetadata', - 'DatabaseRole', - 'ListDatabaseRolesRequest', - 'ListDatabaseRolesResponse', - 'AddSplitPointsRequest', - 'AddSplitPointsResponse', - 'SplitPoints', - 'InternalUpdateGraphOperationRequest', - 'InternalUpdateGraphOperationResponse', - }, -) - - -class RestoreSourceType(proto.Enum): - r"""Indicates the type of the restore source. - - Values: - TYPE_UNSPECIFIED (0): - No restore associated. - BACKUP (1): - A backup was used as the source of the - restore. - """ - TYPE_UNSPECIFIED = 0 - BACKUP = 1 - - -class RestoreInfo(proto.Message): - r"""Information about the database restore. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): - The type of the restore source. - backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): - Information about the backup used to restore - the database. The backup may no longer exist. - - This field is a member of `oneof`_ ``source_info``. - """ - - source_type: 'RestoreSourceType' = proto.Field( - proto.ENUM, - number=1, - enum='RestoreSourceType', - ) - backup_info: gsad_backup.BackupInfo = proto.Field( - proto.MESSAGE, - number=2, - oneof='source_info', - message=gsad_backup.BackupInfo, - ) - - -class Database(proto.Message): - r"""A Cloud Spanner database. - - Attributes: - name (str): - Required. The name of the database. Values are of the form - ``projects//instances//databases/``, - where ```` is as specified in the - ``CREATE DATABASE`` statement. This name can be passed to - other API methods to identify the database. - state (google.cloud.spanner_admin_database_v1.types.Database.State): - Output only. The current database state. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
If it exists, the time at which the - database creation started. - restore_info (google.cloud.spanner_admin_database_v1.types.RestoreInfo): - Output only. Applicable only for restored - databases. Contains information about the - restore source. - encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): - Output only. For databases that are using - customer managed encryption, this field contains - the encryption configuration for the database. - For databases that are using Google default or - other types of encryption, this field is empty. - encryption_info (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): - Output only. For databases that are using customer managed - encryption, this field contains the encryption information - for the database, such as all Cloud KMS key versions that - are in use. The ``encryption_status`` field inside each - ``EncryptionInfo`` is not populated. - - For databases that are using Google default or other types - of encryption, this field is empty. - - This field is propagated lazily from the backend. There - might be a delay from when a key version is being used and - when it appears in this field. - version_retention_period (str): - Output only. The period in which Cloud Spanner retains all - versions of data for the database. This is the same as the - value of version_retention_period database option set using - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - Defaults to 1 hour, if not set. - earliest_version_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Earliest timestamp at which - older versions of the data can be read. This - value is continuously updated by Cloud Spanner - and becomes stale the moment it is queried. If - you are using this value to recover data, make - sure to account for the time from the moment - when the value is queried to the moment when you - initiate the recovery. - default_leader (str): - Output only. The read-write region which contains the - database's leader replicas. - - This is the same as the value of default_leader database - option set using DatabaseAdmin.CreateDatabase or - DatabaseAdmin.UpdateDatabaseDdl. If not explicitly set, this - is empty. - database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): - Output only. The dialect of the Cloud Spanner - Database. - enable_drop_protection (bool): - Whether drop protection is enabled for this database. - Defaults to false, if not set. For more details, please see - how to `prevent accidental database - deletion `__. - reconciling (bool): - Output only. If true, the database is being - updated. If false, there are no ongoing update - operations for the database. - """ - class State(proto.Enum): - r"""Indicates the current state of the database. - - Values: - STATE_UNSPECIFIED (0): - Not specified. - CREATING (1): - The database is still being created. Operations on the - database may fail with ``FAILED_PRECONDITION`` in this - state. - READY (2): - The database is fully created and ready for - use. - READY_OPTIMIZING (3): - The database is fully created and ready for use, but is - still being optimized for performance and cannot handle full - load. - - In this state, the database still references the backup it - was restored from, preventing the backup from being deleted. - When optimizations are complete, the full performance of the - database will be restored, and the database will transition - to ``READY`` state.
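- 
-             A sketch of how a caller might observe these states (names
-             are illustrative)::
- 
-                 from google.cloud import spanner_admin_database_v1 as sad
- 
-                 client = sad.DatabaseAdminClient()
-                 op = client.create_database(
-                     parent="projects/p/instances/i",
-                     create_statement="CREATE DATABASE example_db",
-                 )
-                 db = op.result()  # blocks while the database is CREATING
-                 print(db.state)   # typically State.READY once creation completes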
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - READY_OPTIMIZING = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - state: State = proto.Field( - proto.ENUM, - number=2, - enum=State, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - restore_info: 'RestoreInfo' = proto.Field( - proto.MESSAGE, - number=4, - message='RestoreInfo', - ) - encryption_config: common.EncryptionConfig = proto.Field( - proto.MESSAGE, - number=5, - message=common.EncryptionConfig, - ) - encryption_info: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=common.EncryptionInfo, - ) - version_retention_period: str = proto.Field( - proto.STRING, - number=6, - ) - earliest_version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - default_leader: str = proto.Field( - proto.STRING, - number=9, - ) - database_dialect: common.DatabaseDialect = proto.Field( - proto.ENUM, - number=10, - enum=common.DatabaseDialect, - ) - enable_drop_protection: bool = proto.Field( - proto.BOOL, - number=11, - ) - reconciling: bool = proto.Field( - proto.BOOL, - number=12, - ) - - -class ListDatabasesRequest(proto.Message): - r"""The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Attributes: - parent (str): - Required. The instance whose databases should be listed. - Values are of the form - ``projects//instances/``. - page_size (int): - Number of databases to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabasesResponse.next_page_token] - from a previous - [ListDatabasesResponse][google.spanner.admin.database.v1.ListDatabasesResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDatabasesResponse(proto.Message): - r"""The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Attributes: - databases (MutableSequence[google.cloud.spanner_admin_database_v1.types.Database]): - Databases that matched the request. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases] - call to fetch more of the matching databases. - """ - - @property - def raw_page(self): - return self - - databases: MutableSequence['Database'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Database', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDatabaseRequest(proto.Message): - r"""The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - - Attributes: - parent (str): - Required. The name of the instance that will serve the new - database. Values are of the form - ``projects//instances/``. - create_statement (str): - Required. A ``CREATE DATABASE`` statement, which specifies - the ID of the new database. The database ID must conform to - the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be - between 2 and 30 characters in length. 
If the database ID is - a reserved word or if it contains a hyphen, the database ID - must be enclosed in backticks (:literal:`\``). - extra_statements (MutableSequence[str]): - Optional. A list of DDL statements to run - inside the newly created database. Statements - can create tables, indexes, etc. These - statements execute atomically with the creation - of the database: - - if there is an error in any statement, the - database is not created. - encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): - Optional. The encryption configuration for - the database. If this field is not specified, - Cloud Spanner will encrypt/decrypt all data at - rest using Google default encryption. - database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): - Optional. The dialect of the Cloud Spanner - Database. - proto_descriptors (bytes): - Optional. Proto descriptors used by CREATE/ALTER PROTO - BUNDLE statements in 'extra_statements' above. Contains a - protobuf-serialized - `google.protobuf.FileDescriptorSet `__. - To generate it, - `install `__ and - run ``protoc`` with --include_imports and - --descriptor_set_out. For example, to generate for - moon/shot/app.proto, run - - :: - - $protoc --proto_path=/app_path --proto_path=/lib_path \ - --include_imports \ - --descriptor_set_out=descriptors.data \ - moon/shot/app.proto - - For more details, see protobuffer `self - description `__. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - create_statement: str = proto.Field( - proto.STRING, - number=2, - ) - extra_statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - encryption_config: common.EncryptionConfig = proto.Field( - proto.MESSAGE, - number=4, - message=common.EncryptionConfig, - ) - database_dialect: common.DatabaseDialect = proto.Field( - proto.ENUM, - number=5, - enum=common.DatabaseDialect, - ) - proto_descriptors: bytes = proto.Field( - proto.BYTES, - number=6, - ) - - -class CreateDatabaseMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - - Attributes: - database (str): - The database being created. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseRequest(proto.Message): - r"""The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - - Attributes: - name (str): - Required. The name of the requested database. Values are of - the form - ``projects//instances//databases/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDatabaseRequest(proto.Message): - r"""The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - - Attributes: - database (google.cloud.spanner_admin_database_v1.types.Database): - Required. The database to update. The ``name`` field of the - database is of the form - ``projects//instances//databases/``. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to update. Currently, only - ``enable_drop_protection`` field can be updated. 
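- 
-         A sketch of the one update currently supported (resource name is
-         illustrative)::
- 
-             from google.cloud import spanner_admin_database_v1 as sad
-             from google.protobuf import field_mask_pb2
- 
-             client = sad.DatabaseAdminClient()
-             op = client.update_database(
-                 database={"name": "projects/p/instances/i/databases/d",
-                           "enable_drop_protection": True},
-                 update_mask=field_mask_pb2.FieldMask(
-                     paths=["enable_drop_protection"]),
-             )
-             op.result()  # UpdateDatabase is a long-running operation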
- """ - - database: 'Database' = proto.Field( - proto.MESSAGE, - number=1, - message='Database', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class UpdateDatabaseMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - - Attributes: - request (google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest): - The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is - best-effort). - """ - - request: 'UpdateDatabaseRequest' = proto.Field( - proto.MESSAGE, - number=1, - message='UpdateDatabaseRequest', - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateDatabaseDdlRequest(proto.Message): - r"""Enqueues the given DDL statements to be applied, in order but not - necessarily all at once, to the database schema at some point (or - points) in the future. The server checks that the statements are - executable (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon later execution - (e.g., if a statement from another batch of statements is applied - first and it conflicts in some way, or if there is some data-related - problem like a ``NULL`` value in a column to which ``NOT NULL`` - would be added). If a statement fails, all subsequent statements in - the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be used with - the [Operations][google.longrunning.Operations] API to monitor - progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - - Attributes: - database (str): - Required. The database to update. - statements (MutableSequence[str]): - Required. DDL statements to be applied to the - database. - operation_id (str): - If empty, the new update request is assigned an - automatically-generated operation ID. Otherwise, - ``operation_id`` is used to construct the name of the - resulting [Operation][google.longrunning.Operation]. - - Specifying an explicit operation ID simplifies determining - whether the statements were executed in the event that the - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - call is replayed, or the return value is otherwise lost: the - [database][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.database] - and ``operation_id`` fields can be combined to form the - [name][google.longrunning.Operation.name] of the resulting - [longrunning.Operation][google.longrunning.Operation]: - ``/operations/``. - - ``operation_id`` should be unique within the database, and - must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that - automatically-generated operation IDs always begin with an - underscore. 
If the named operation already exists, - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - returns ``ALREADY_EXISTS``. - proto_descriptors (bytes): - Optional. Proto descriptors used by CREATE/ALTER PROTO - BUNDLE statements. Contains a protobuf-serialized - `google.protobuf.FileDescriptorSet `__. - To generate it, - `install `__ and - run ``protoc`` with --include_imports and - --descriptor_set_out. For example, to generate for - moon/shot/app.proto, run - - :: - - $protoc --proto_path=/app_path --proto_path=/lib_path \ - --include_imports \ - --descriptor_set_out=descriptors.data \ - moon/shot/app.proto - - For more details, see protobuffer `self - description `__. - throughput_mode (bool): - Optional. This field is exposed to be used by the Spanner - Migration Tool. For more details, see - `SMT `__. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - operation_id: str = proto.Field( - proto.STRING, - number=3, - ) - proto_descriptors: bytes = proto.Field( - proto.BYTES, - number=4, - ) - throughput_mode: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class DdlStatementActionInfo(proto.Message): - r"""Action information extracted from a DDL statement. This proto is - used to display the brief info of the DDL statement for the - operation - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - - Attributes: - action (str): - The action for the DDL statement, e.g. - CREATE, ALTER, DROP, GRANT, etc. This field is a - non-empty string. - entity_type (str): - The entity type for the DDL statement, e.g. TABLE, INDEX, - VIEW, etc. This field can be empty string for some DDL - statement, e.g. for statement "ANALYZE", ``entity_type`` = - "". - entity_names (MutableSequence[str]): - The entity name(s) being operated on the DDL statement. E.g. - - 1. For statement "CREATE TABLE t1(...)", ``entity_names`` = - ["t1"]. - 2. For statement "GRANT ROLE r1, r2 ...", ``entity_names`` = - ["r1", "r2"]. - 3. For statement "ANALYZE", ``entity_names`` = []. - """ - - action: str = proto.Field( - proto.STRING, - number=1, - ) - entity_type: str = proto.Field( - proto.STRING, - number=2, - ) - entity_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateDatabaseDdlMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - - Attributes: - database (str): - The database being modified. - statements (MutableSequence[str]): - For an update this list contains all the - statements. For an individual statement, this - list contains only that statement. - commit_timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): - Reports the commit timestamps of all statements that have - succeeded so far, where ``commit_timestamps[i]`` is the - commit timestamp for the statement ``statements[i]``. - throttled (bool): - Output only. When true, indicates that the - operation is throttled e.g. due to resource - constraints. When resources become available the - operation will resume and this field will be - false again. - progress (MutableSequence[google.cloud.spanner_admin_database_v1.types.OperationProgress]): - The progress of the - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - operations. 
All DDL statements will have continuously - updating progress, and ``progress[i]`` is the operation - progress for ``statements[i]``. Also, ``progress[i]`` will - have start time and end time populated with commit timestamp - of operation, as well as a progress of 100% once the - operation has completed. - actions (MutableSequence[google.cloud.spanner_admin_database_v1.types.DdlStatementActionInfo]): - The brief action info for the DDL statements. ``actions[i]`` - is the brief info for ``statements[i]``. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - commit_timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - throttled: bool = proto.Field( - proto.BOOL, - number=4, - ) - progress: MutableSequence[common.OperationProgress] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=common.OperationProgress, - ) - actions: MutableSequence['DdlStatementActionInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='DdlStatementActionInfo', - ) - - -class DropDatabaseRequest(proto.Message): - r"""The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - - Attributes: - database (str): - Required. The database to be dropped. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseDdlRequest(proto.Message): - r"""The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - Attributes: - database (str): - Required. The database whose schema we wish to get. Values - are of the form - ``projects//instances//databases/`` - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseDdlResponse(proto.Message): - r"""The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - Attributes: - statements (MutableSequence[str]): - A list of formatted DDL statements defining - the schema of the database specified in the - request. - proto_descriptors (bytes): - Proto descriptors stored in the database. Contains a - protobuf-serialized - `google.protobuf.FileDescriptorSet `__. - For more details, see protobuffer `self - description `__. - """ - - statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - proto_descriptors: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class ListDatabaseOperationsRequest(proto.Message): - r"""The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Attributes: - parent (str): - Required. The instance of the database operations. Values - are of the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [Operation][google.longrunning.Operation] are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. 
For example, - the type string for - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata] - is - ``type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first, if filtering - on metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic. However, you can specify AND, OR, - and NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND`` - ``(metadata.source_type:BACKUP) AND`` - ``(metadata.backup_info.backup:backup_howl) AND`` - ``(metadata.name:restored_howl) AND`` - ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Return operations where: - - - The operation's metadata type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - - The database is restored from a backup. - - The backup name contains "backup_howl". - - The restored database's name contains "restored_howl". - - The operation started before 2018-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabaseOperationsResponse.next_page_token] - from a previous - [ListDatabaseOperationsResponse][google.spanner.admin.database.v1.ListDatabaseOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDatabaseOperationsResponse(proto.Message): - r"""The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Attributes: - operations (MutableSequence[google.longrunning.operations_pb2.Operation]): - The list of matching database [long-running - operations][google.longrunning.Operation]. Each operation's - name will be prefixed by the database's name. The - operation's - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RestoreDatabaseRequest(proto.Message): - r"""The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The name of the instance in which to create the - restored database. This instance must be in the same project - and have the same instance configuration as the instance - containing the source backup. Values are of the form - ``projects//instances/``. - database_id (str): - Required. The id of the database to create and restore to. - This database must not already exist. The ``database_id`` - appended to ``parent`` forms the full database name of the - form - ``projects//instances//databases/``. - backup (str): - Name of the backup from which to restore. Values are of the - form - ``projects//instances//backups/``. - - This field is a member of `oneof`_ ``source``. - encryption_config (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig): - Optional. An encryption configuration describing the - encryption type and key resources in Cloud KMS used to - encrypt/decrypt the database to restore to. If this field is - not specified, the restored database will use the same - encryption configuration as the backup by default, namely - [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup: str = proto.Field( - proto.STRING, - number=3, - oneof='source', - ) - encryption_config: 'RestoreDatabaseEncryptionConfig' = proto.Field( - proto.MESSAGE, - number=4, - message='RestoreDatabaseEncryptionConfig', - ) - - -class RestoreDatabaseEncryptionConfig(proto.Message): - r"""Encryption configuration for the restored database. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig.EncryptionType): - Required. The encryption type of the restored - database. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to - encrypt/decrypt the restored database. This field should be - set only when - [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - kms_key_names (MutableSequence[str]): - Optional. Specifies the KMS configuration for the one or - more keys used to encrypt the database. Values are of the - form - ``projects//locations//keyRings//cryptoKeys/``. - - The keys referenced by kms_key_names must fully cover all - regions of the database instance configuration. Some - examples: - - - For single region database instance configs, specify a - single regional location KMS key. - - For multi-regional database instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For a database instance config of type USER_MANAGED, - please specify only regional location KMS keys to cover - each region in the instance config. Multi-regional - location KMS keys are not supported for USER_MANAGED - instance configs. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the database to be restored. - - Values: - ENCRYPTION_TYPE_UNSPECIFIED (0): - Unspecified. Do not use. 
- USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1): - This is the default option when - [encryption_config][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig] - is not specified. - GOOGLE_DEFAULT_ENCRYPTION (2): - Use Google default encryption. - CUSTOMER_MANAGED_ENCRYPTION (3): - Use customer-managed encryption. If specified, - ``kms_key_name`` must contain a valid Cloud KMS key. - """ - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type: EncryptionType = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class RestoreDatabaseMetadata(proto.Message): - r"""Metadata type for the long-running operation returned by - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Name of the database being created and - restored to. - source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): - The type of the restore source. - backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): - Information about the backup used to restore - the database. - - This field is a member of `oneof`_ ``source_info``. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of this operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - optimize_database_operation_name (str): - If present, the name of the long-running operation that - tracks the post-restore optimization process, which improves - the performance of the restored database and removes the - dependency on the restore source. The name is of the form - ``projects//instances//databases//operations/`` - where the ``databases/`` path segment carries the name of - the database being created and restored to. The metadata - type of the long-running operation is - [OptimizeRestoredDatabaseMetadata][google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata]. - This long-running operation is automatically created by the - system after the RestoreDatabase long-running operation - completes successfully. It is not created if the restore was - not successful.
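For illustration, a minimal restore-and-poll sketch against the generated client (an editor's sketch; the project, instance, backup, and database IDs are placeholders):

            ::

                from google.cloud import spanner_admin_database_v1

                client = spanner_admin_database_v1.DatabaseAdminClient()
                operation = client.restore_database(
                    parent="projects/my-project/instances/my-instance",
                    database_id="restored-db",
                    backup="projects/my-project/instances/my-instance/backups/my-backup",
                )
                database = operation.result()   # blocks until the restore completes
                metadata = operation.metadata   # a RestoreDatabaseMetadata message
                print(metadata.optimize_database_operation_name)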
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - source_type: 'RestoreSourceType' = proto.Field( - proto.ENUM, - number=2, - enum='RestoreSourceType', - ) - backup_info: gsad_backup.BackupInfo = proto.Field( - proto.MESSAGE, - number=3, - oneof='source_info', - message=gsad_backup.BackupInfo, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=4, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - optimize_database_operation_name: str = proto.Field( - proto.STRING, - number=6, - ) - - -class OptimizeRestoredDatabaseMetadata(proto.Message): - r"""Metadata type for the long-running operation used to track - the progress of optimizations performed on a newly restored - database. This long-running operation is automatically created - by the system after the successful completion of a database - restore, and cannot be cancelled. - - Attributes: - name (str): - Name of the restored database being - optimized. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the post-restore - optimizations. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - - -class DatabaseRole(proto.Message): - r"""A Cloud Spanner database role. - - Attributes: - name (str): - Required. The name of the database role. Values are of the - form - ``projects//instances//databases//databaseRoles/`` - where ```` is as specified in the ``CREATE ROLE`` DDL - statement. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDatabaseRolesRequest(proto.Message): - r"""The request for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - - Attributes: - parent (str): - Required. The database whose roles should be listed. Values - are of the form - ``projects//instances//databases/``. - page_size (int): - Number of database roles to be returned in - the response. If 0 or less, defaults to the - server's maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabaseRolesResponse.next_page_token] - from a previous - [ListDatabaseRolesResponse][google.spanner.admin.database.v1.ListDatabaseRolesResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDatabaseRolesResponse(proto.Message): - r"""The response for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - - Attributes: - database_roles (MutableSequence[google.cloud.spanner_admin_database_v1.types.DatabaseRole]): - Database roles that matched the request. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles] - call to fetch more of the matching roles. 
- """ - - @property - def raw_page(self): - return self - - database_roles: MutableSequence['DatabaseRole'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DatabaseRole', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AddSplitPointsRequest(proto.Message): - r"""The request for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - Attributes: - database (str): - Required. The database on whose tables/indexes split points - are to be added. Values are of the form - ``projects//instances//databases/``. - split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]): - Required. The split points to add. - initiator (str): - Optional. A user-supplied tag associated with the split - points. For example, "intital_data_load", "special_event_1". - Defaults to "CloudAddSplitPointsAPI" if not specified. The - length of the tag must not exceed 50 characters,else will be - trimmed. Only valid UTF8 characters are allowed. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - split_points: MutableSequence['SplitPoints'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='SplitPoints', - ) - initiator: str = proto.Field( - proto.STRING, - number=3, - ) - - -class AddSplitPointsResponse(proto.Message): - r"""The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - - -class SplitPoints(proto.Message): - r"""The split points of a table/index. - - Attributes: - table (str): - The table to split. - index (str): - The index to split. If specified, the ``table`` field must - refer to the index's base table. - keys (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints.Key]): - Required. The list of split keys, i.e., the - split boundaries. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The expiration timestamp of the - split points. A timestamp in the past means - immediate expiration. The maximum value can be - 30 days in the future. Defaults to 10 days in - the future if not specified. - """ - - class Key(proto.Message): - r"""A split key. - - Attributes: - key_parts (google.protobuf.struct_pb2.ListValue): - Required. The column values making up the - split key. - """ - - key_parts: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=1, - message=struct_pb2.ListValue, - ) - - table: str = proto.Field( - proto.STRING, - number=1, - ) - index: str = proto.Field( - proto.STRING, - number=2, - ) - keys: MutableSequence[Key] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Key, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class InternalUpdateGraphOperationRequest(proto.Message): - r"""Internal request proto, do not use directly. - - Attributes: - database (str): - Internal field, do not use directly. - operation_id (str): - Internal field, do not use directly. - vm_identity_token (str): - Internal field, do not use directly. - progress (float): - Internal field, do not use directly. - status (google.rpc.status_pb2.Status): - Internal field, do not use directly. 
- """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - operation_id: str = proto.Field( - proto.STRING, - number=2, - ) - vm_identity_token: str = proto.Field( - proto.STRING, - number=5, - ) - progress: float = proto.Field( - proto.DOUBLE, - number=3, - ) - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=6, - message=status_pb2.Status, - ) - - -class InternalUpdateGraphOperationResponse(proto.Message): - r"""Internal response proto, do not use directly. - """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/mypy.ini b/owl-bot-staging/spanner_admin_database/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_database/v1/noxfile.py b/owl-bot-staging/spanner_admin_database/v1/noxfile.py deleted file mode 100644 index feed1a3ac8..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/noxfile.py +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", - "3.14", -] - -DEFAULT_PYTHON_VERSION = "3.14" - -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): -# Switch this to Python 3.15 alpha1 -# https://peps.python.org/pep-0790/ -PREVIEW_PYTHON_VERSION = "3.14" - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-spanner-admin-database" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - # 
TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy - "mypy<1.16.0", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. 
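    # A quick way to confirm which protobuf backend is active (an illustrative
    # check; ``api_implementation`` is an internal protobuf module): the runtime
    # selects its backend from the PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION
    # environment variable that this session exports below, and
    #
    #   python -c "from google.protobuf.internal import api_implementation; print(api_implementation.Type())"
    #
    # prints "upb", "python", or "cpp" accordingly.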
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory - # paths to build: - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=PREVIEW_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. 
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # Note: If a dependency is added to the `prerel_deps` list, - # the `core_dependencies_from_source` list in the `core_deps_from_source` - # nox session should also be updated. - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpc-google-iam-v1", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--ignore-installed", dep) - # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` - # to the dictionary below once this bug is fixed. - # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add - # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below - # once this bug is fixed. - package_namespaces = { - "google-api-core": "google.api_core", - "google-auth": "google.auth", - "grpcio": "grpc", - "protobuf": "google.protobuf", - "proto-plus": "proto", - } - - version_namespace = package_namespaces.get(dep) - - print(f"Installed {dep}") - if version_namespace: - session.run( - "python", - "-c", - f"import {version_namespace}; print({version_namespace}.__version__)", - ) - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb"], -) -def core_deps_from_source(session, protobuf_implementation): - """Run all tests with core dependencies installed from source - rather than pulling the dependencies from PyPI. - """ - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. 
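    # Note on the extraction below: the lookahead regex keeps only pinned
    # requirements, so a constraints line such as "google-api-core==1.34.1"
    # yields the bare package name "google-api-core", while comment lines and
    # unpinned entries produce no match.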
- with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and - # `grpcio-status` should be added to the list below so that they are installed from source, - # rather than PyPI. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be - # added to the list below so that it is installed from source, rather than PyPI - # Note: If a dependency is added to the `core_dependencies_from_source` list, - # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. - core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json deleted file mode 100644 index e89008727d..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ /dev/null @@ -1,4480 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.spanner.admin.database.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-spanner-admin-database", - "version": "0.0.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.add_split_points", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "AddSplitPoints" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "split_points", - "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", 
- "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", - "shortName": "add_split_points" - }, - "description": "Sample for AddSplitPoints", - "file": "spanner_v1_generated_database_admin_add_split_points_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_add_split_points_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.add_split_points", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "AddSplitPoints" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "split_points", - "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", - "shortName": "add_split_points" - }, - "description": "Sample for AddSplitPoints", - "file": "spanner_v1_generated_database_admin_add_split_points_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_add_split_points_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.copy_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CopyBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_id", - "type": "str" - }, - { - 
"name": "source_backup", - "type": "str" - }, - { - "name": "expire_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "copy_backup" - }, - "description": "Sample for CopyBackup", - "file": "spanner_v1_generated_database_admin_copy_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_copy_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.copy_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CopyBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_id", - "type": "str" - }, - { - "name": "source_backup", - "type": "str" - }, - { - "name": "expire_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "copy_backup" - }, - "description": "Sample for CopyBackup", - "file": "spanner_v1_generated_database_admin_copy_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_copy_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": 
"CreateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "backup_schedule_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "create_backup_schedule" - }, - "description": "Sample for CreateBackupSchedule", - "file": "spanner_v1_generated_database_admin_create_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "backup_schedule_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "create_backup_schedule" - }, - "description": "Sample for CreateBackupSchedule", - "file": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "backup_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_backup" - }, - "description": "Sample for CreateBackup", - "file": "spanner_v1_generated_database_admin_create_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "backup_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_backup" - }, - "description": "Sample for CreateBackup", - "file": "spanner_v1_generated_database_admin_create_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "spanner_v1_generated_database_admin_create_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "create_statement", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "spanner_v1_generated_database_admin_create_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "create_statement", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "spanner_v1_generated_database_admin_create_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup_schedule" - }, - "description": "Sample for DeleteBackupSchedule", - "file": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup_schedule" - }, - "description": "Sample for DeleteBackupSchedule", - "file": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": 
{ - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "spanner_v1_generated_database_admin_delete_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "spanner_v1_generated_database_admin_delete_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.drop_database", - "method": { - "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DropDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "drop_database" - }, - "description": "Sample for DropDatabase", - "file": "spanner_v1_generated_database_admin_drop_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_drop_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.drop_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DropDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "drop_database" - }, - "description": "Sample for DropDatabase", - "file": "spanner_v1_generated_database_admin_drop_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_drop_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "get_backup_schedule" - }, - "description": "Sample for GetBackupSchedule", - "file": "spanner_v1_generated_database_admin_get_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "get_backup_schedule" - }, - "description": "Sample for GetBackupSchedule", - "file": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "spanner_v1_generated_database_admin_get_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "spanner_v1_generated_database_admin_get_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", - "shortName": "get_database_ddl" - }, - "description": "Sample for GetDatabaseDdl", - "file": "spanner_v1_generated_database_admin_get_database_ddl_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_ddl_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", - "shortName": "get_database_ddl" - }, - "description": "Sample for GetDatabaseDdl", - "file": "spanner_v1_generated_database_admin_get_database_ddl_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_ddl_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "spanner_v1_generated_database_admin_get_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "spanner_v1_generated_database_admin_get_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_database_admin_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_database_admin_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.internal_update_graph_operation", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "InternalUpdateGraphOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "operation_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", - "shortName": "internal_update_graph_operation" - }, - "description": "Sample for InternalUpdateGraphOperation", - "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.internal_update_graph_operation", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "InternalUpdateGraphOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "operation_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", - "shortName": "internal_update_graph_operation" - }, - "description": "Sample for InternalUpdateGraphOperation", - "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_operations", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupOperations" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager", - "shortName": "list_backup_operations" - }, - "description": "Sample for ListBackupOperations", - "file": "spanner_v1_generated_database_admin_list_backup_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_operations", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager", - "shortName": "list_backup_operations" - }, - "description": "Sample for ListBackupOperations", - "file": "spanner_v1_generated_database_admin_list_backup_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_schedules", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", - "service": { - "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupSchedules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager", - "shortName": "list_backup_schedules" - }, - "description": "Sample for ListBackupSchedules", - "file": "spanner_v1_generated_database_admin_list_backup_schedules_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_schedules_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_schedules", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupSchedules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager", - "shortName": "list_backup_schedules" - }, - "description": "Sample for ListBackupSchedules", - "file": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backups", - 
"method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": "spanner_v1_generated_database_admin_list_backups_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backups", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": "spanner_v1_generated_database_admin_list_backups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_operations", - "method": { - "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager", - "shortName": "list_database_operations" - }, - "description": "Sample for ListDatabaseOperations", - "file": "spanner_v1_generated_database_admin_list_database_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_operations", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager", - "shortName": "list_database_operations" - }, - "description": "Sample for ListDatabaseOperations", - "file": "spanner_v1_generated_database_admin_list_database_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_roles", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseRoles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager", - "shortName": "list_database_roles" - }, - "description": "Sample for ListDatabaseRoles", - "file": "spanner_v1_generated_database_admin_list_database_roles_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_roles_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_roles", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseRoles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager", - "shortName": "list_database_roles" - }, - "description": "Sample for ListDatabaseRoles", - "file": "spanner_v1_generated_database_admin_list_database_roles_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_roles_sync.py" - }, - { 
- "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_databases", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "spanner_v1_generated_database_admin_list_databases_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_databases_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_databases", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "spanner_v1_generated_database_admin_list_databases_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_databases_sync.py" - }, - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.restore_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "RestoreDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "backup", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restore_database" - }, - "description": "Sample for RestoreDatabase", - "file": "spanner_v1_generated_database_admin_restore_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_restore_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.restore_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "RestoreDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "backup", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restore_database" - }, - "description": "Sample for RestoreDatabase", - "file": "spanner_v1_generated_database_admin_restore_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, 
- "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_restore_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_database_admin_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_database_admin_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_set_iam_policy_sync.py" - }, - { - "canonical": true, - 
"clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_database_admin_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "update_backup_schedule" - }, - "description": "Sample for UpdateBackupSchedule", - "file": "spanner_v1_generated_database_admin_update_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "update_backup_schedule" - }, - "description": "Sample for UpdateBackupSchedule", - "file": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync", - 
"segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "update_backup" - }, - "description": "Sample for UpdateBackup", - "file": "spanner_v1_generated_database_admin_update_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "update_backup" - }, - "description": "Sample for UpdateBackup", - "file": 
"spanner_v1_generated_database_admin_update_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "statements", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_database_ddl" - }, - "description": "Sample for UpdateDatabaseDdl", - "file": "spanner_v1_generated_database_admin_update_database_ddl_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_ddl_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "statements", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - 
"shortName": "update_database_ddl" - }, - "description": "Sample for UpdateDatabaseDdl", - "file": "spanner_v1_generated_database_admin_update_database_ddl_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_ddl_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.spanner_admin_database_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "spanner_v1_generated_database_admin_update_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.spanner_admin_database_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "spanner_v1_generated_database_admin_update_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_sync.py" - } - ] -} diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py deleted file mode 100644 index ff6fcfe598..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AddSplitPoints -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_add_split_points():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.AddSplitPointsRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = await client.add_split_points(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py
deleted file mode 100644
index 3819bbe986..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AddSplitPoints
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_add_split_points():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.AddSplitPointsRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = client.add_split_points(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py
deleted file mode 100644
index d885947bb5..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CopyBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_copy_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CopyBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-        source_backup="source_backup_value",
-    )
-
-    # Make the request
-    operation = client.copy_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py
deleted file mode 100644
index a571e058c9..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CopyBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_copy_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CopyBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-        source_backup="source_backup_value",
-    )
-
-    # Make the request
-    operation = client.copy_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py
deleted file mode 100644
index 2ad8881f54..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_create_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-    )
-
-    # Make the request
-    operation = client.create_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py
deleted file mode 100644
index efdcc2457e..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_create_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupScheduleRequest(
-        parent="parent_value",
-        backup_schedule_id="backup_schedule_id_value",
-    )
-
-    # Make the request
-    response = await client.create_backup_schedule(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py
deleted file mode 100644
index 60d4b50c3b..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_create_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupScheduleRequest(
-        parent="parent_value",
-        backup_schedule_id="backup_schedule_id_value",
-    )
-
-    # Make the request
-    response = client.create_backup_schedule(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py
deleted file mode 100644
index 02b9d1f0e7..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_create_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-    )
-
-    # Make the request
-    operation = client.create_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py
deleted file mode 100644
index 47399a8d40..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_create_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateDatabaseRequest(
-        parent="parent_value",
-        create_statement="create_statement_value",
-    )
-
-    # Make the request
-    operation = client.create_database(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py
deleted file mode 100644
index 6f112cd8a7..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_create_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateDatabaseRequest(
-        parent="parent_value",
-        create_statement="create_statement_value",
-    )
-
-    # Make the request
-    operation = client.create_database(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py
deleted file mode 100644
index ab10785105..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_delete_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_backup(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py
deleted file mode 100644
index 591d45cb10..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_delete_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_backup_schedule(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py
deleted file mode 100644
index 720417ba65..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_delete_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_backup_schedule(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py
deleted file mode 100644
index 736dc56a23..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_delete_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_backup(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py
deleted file mode 100644
index 15f279b72d..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DropDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_drop_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DropDatabaseRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    await client.drop_database(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py
deleted file mode 100644
index f218cabd83..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DropDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_drop_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DropDatabaseRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    client.drop_database(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py
deleted file mode 100644
index 58b93a119a..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_get_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_backup(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py
deleted file mode 100644
index 5a37eec975..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_get_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py deleted file mode 100644 index 4006cac333..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_get_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py deleted file mode 100644 index 16cffcd78d..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_get_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py deleted file mode 100644 index fd8621c27b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_get_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py deleted file mode 100644 index 8e84b21f78..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabaseDdl -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_get_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseDdlRequest( - database="database_value", - ) - - # Make the request - response = await client.get_database_ddl(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py deleted file mode 100644 index 495b557a55..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabaseDdl -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_get_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseDdlRequest( - database="database_value", - ) - - # Make the request - response = client.get_database_ddl(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py deleted file mode 100644 index ab729bb9e3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_get_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py deleted file mode 100644 index d5d75de78b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py deleted file mode 100644 index 75e0b48b1b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py deleted file mode 100644 index 556205a0aa..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InternalUpdateGraphOperation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = await client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py deleted file mode 100644 index 46f1a3c88f..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InternalUpdateGraphOperation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py deleted file mode 100644 index a56ec9f80e..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_backup_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py deleted file mode 100644 index 6383e1b247..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_backup_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py deleted file mode 100644 index 25ac53891a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupSchedules -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_backup_schedules(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_schedules(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py deleted file mode 100644 index 89cf82d278..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupSchedules -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_backup_schedules(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_schedules(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py deleted file mode 100644 index 140e519e07..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackups_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackups_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py deleted file mode 100644 index 9f04036f74..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackups_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackups_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py deleted file mode 100644 index 3bc614b232..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_database_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py deleted file mode 100644 index 3d4dc965a9..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_database_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py deleted file mode 100644 index 46ec91ce89..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseRoles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_database_roles(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseRolesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_roles(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py deleted file mode 100644 index d39e4759dd..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseRoles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_database_roles(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseRolesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_roles(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py deleted file mode 100644 index 586dfa56f1..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_databases(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py deleted file mode 100644 index e6ef221af6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_databases(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py deleted file mode 100644 index 384c063c61..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_restore_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.RestoreDatabaseRequest( - backup="backup_value", - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py deleted file mode 100644 index a327a8ae13..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_restore_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.RestoreDatabaseRequest( - backup="backup_value", - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py deleted file mode 100644 index edade4c950..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
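Both RestoreDatabase samples block indefinitely in operation.result(). A hedged variant, not generated code: bound the wait and surface operation failures explicitly. The request values stay the samples' placeholders and the timeout is illustrative:

    from google.api_core import exceptions
    from google.cloud import spanner_admin_database_v1

    def restore_with_deadline():
        client = spanner_admin_database_v1.DatabaseAdminClient()
        operation = client.restore_database(
            request=spanner_admin_database_v1.RestoreDatabaseRequest(
                parent="parent_value",
                database_id="database_id_value",
                backup="backup_value",
            )
        )
        try:
            # result() re-raises any operation error; if the deadline passes
            # first it raises concurrent.futures.TimeoutError instead.
            database = operation.result(timeout=600)
            print(f"restored: {database.name}")
        except exceptions.GoogleAPICallError as err:
            print(f"restore failed: {err}")
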
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py deleted file mode 100644 index 28a6746f4a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py deleted file mode 100644 index 0e6ea91cb3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
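The SetIamPolicy samples send a request carrying only resource, which as written would replace the existing policy with an empty one. A more typical read-modify-write cycle looks like the hedged sketch below; the role and member strings are illustrative, and the etag returned by GetIamPolicy guards against concurrent writers:

    from google.cloud import spanner_admin_database_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    def grant_database_reader(resource: str, member: str):
        client = spanner_admin_database_v1.DatabaseAdminClient()

        # Read the current policy so its etag protects the write below.
        policy = client.get_iam_policy(
            request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
        )
        policy.bindings.append(
            policy_pb2.Binding(
                role="roles/spanner.databaseReader",
                members=[member],
            )
        )
        return client.set_iam_policy(
            request=iam_policy_pb2.SetIamPolicyRequest(
                resource=resource, policy=policy
            )
        )
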
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py deleted file mode 100644 index 3fd0316dc1..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py deleted file mode 100644 index 95fa2a63f6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
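TestIamPermissions echoes back only the subset of the requested permissions that the caller actually holds, so an authorization check reduces to a set comparison. A minimal sketch; the permission name is illustrative:

    from google.cloud import spanner_admin_database_v1
    from google.iam.v1 import iam_policy_pb2

    def can_write(resource: str) -> bool:
        client = spanner_admin_database_v1.DatabaseAdminClient()
        wanted = ["spanner.databases.write"]
        response = client.test_iam_permissions(
            request=iam_policy_pb2.TestIamPermissionsRequest(
                resource=resource,
                permissions=wanted,
            )
        )
        # Granted permissions come back verbatim; missing ones are omitted.
        return set(wanted) <= set(response.permissions)
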
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupRequest( - ) - - # Make the request - response = await client.update_backup(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py deleted file mode 100644 index de17dfc86e..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = await client.update_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py deleted file mode 100644 index 4ef64a0673..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = client.update_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py deleted file mode 100644 index 9dbb0148dc..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupRequest( - ) - - # Make the request - response = client.update_backup(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py deleted file mode 100644 index d5588c3036..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
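The Update* samples above construct empty requests, which serve only as templates; a real call pairs the changed resource fields with a FieldMask naming them. A hedged sketch for UpdateBackup, extending a backup's retention (expire_time being the typically mutable Backup field; the helper name and the seven-day delta are illustrative):

    import datetime

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import field_mask_pb2

    def extend_backup_expiry(backup_name: str, days: int = 7):
        client = spanner_admin_database_v1.DatabaseAdminClient()
        backup = spanner_admin_database_v1.Backup(
            name=backup_name,
            # proto-plus marshals the datetime into a Timestamp.
            expire_time=datetime.datetime.now(datetime.timezone.utc)
            + datetime.timedelta(days=days),
        )
        return client.update_backup(
            request=spanner_admin_database_v1.UpdateBackupRequest(
                backup=backup,
                update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
            )
        )
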
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - database = spanner_admin_database_v1.Database() - database.name = "name_value" - - request = spanner_admin_database_v1.UpdateDatabaseRequest( - database=database, - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py deleted file mode 100644 index ad98e2da9c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabaseDdl -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py deleted file mode 100644 index 73297524b9..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabaseDdl -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py deleted file mode 100644 index 62ed40bc84..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
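UpdateDatabase follows the same FieldMask pattern but returns a long-running operation rather than the resource directly. A hedged sketch flipping enable_drop_protection, one of the updatable Database fields; the timeout is illustrative:

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import field_mask_pb2

    def enable_drop_protection(database_name: str):
        client = spanner_admin_database_v1.DatabaseAdminClient()
        database = spanner_admin_database_v1.Database(
            name=database_name,
            enable_drop_protection=True,
        )
        operation = client.update_database(
            request=spanner_admin_database_v1.UpdateDatabaseRequest(
                database=database,
                # Only fields named here are changed; everything else is ignored.
                update_mask=field_mask_pb2.FieldMask(
                    paths=["enable_drop_protection"]
                ),
            )
        )
        return operation.result(timeout=300)
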
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - database = spanner_admin_database_v1.Database() - database.name = "name_value" - - request = spanner_admin_database_v1.UpdateDatabaseRequest( - database=database, - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py deleted file mode 100644 index d642e9a0e3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py +++ /dev/null @@ -1,202 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spanner_admin_databaseCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'add_split_points': ('database', 'split_points', 'initiator', ), - 'copy_backup': ('parent', 'backup_id', 'source_backup', 'expire_time', 'encryption_config', ), - 'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), - 'create_backup_schedule': ('parent', 'backup_schedule_id', 'backup_schedule', ), - 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', 'database_dialect', 'proto_descriptors', ), - 'delete_backup': ('name', ), - 'delete_backup_schedule': ('name', ), - 'drop_database': ('database', ), - 'get_backup': ('name', ), - 'get_backup_schedule': ('name', ), - 'get_database': ('name', ), - 'get_database_ddl': ('database', ), - 'get_iam_policy': ('resource', 'options', ), - 'internal_update_graph_operation': ('database', 'operation_id', 'vm_identity_token', 'progress', 'status', ), - 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_backup_schedules': ('parent', 'page_size', 'page_token', ), - 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_database_roles': ('parent', 'page_size', 'page_token', ), - 'list_databases': ('parent', 'page_size', 'page_token', ), - 'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_backup': ('backup', 'update_mask', ), - 'update_backup_schedule': ('backup_schedule', 'update_mask', ), - 'update_database': ('database', 'update_mask', ), - 'update_database_ddl': ('database', 'statements', 'operation_id', 'proto_descriptors', 'throughput_mode', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=spanner_admin_databaseCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the spanner_admin_database client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
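The transformer above rewrites flattened positional calls into the request={...} shape, peeling retry/timeout/metadata off the end and re-emitting them as control keywords. A small demonstration, assuming the deleted script is importable as a module (the import path below is hypothetical and depends on sys.path); the printed output is approximate:

    import libcst as cst

    # Hypothetical import path; the class is defined in the script above.
    from fixup_spanner_admin_database_v1_keywords import (
        spanner_admin_databaseCallTransformer,
    )

    src = "client.get_database('projects/p/instances/i/databases/d', timeout=30.0)"
    module = cst.parse_module(src)
    print(module.visit(spanner_admin_databaseCallTransformer()).code)
    # Roughly:
    # client.get_database(request={'name': 'projects/p/instances/i/databases/d'}, timeout=30.0)
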
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_database/v1/setup.py b/owl-bot-staging/spanner_admin_database/v1/setup.py deleted file mode 100644 index 64017e80e7..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/setup.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-spanner-admin-database' - - -description = "Google Cloud Spanner Admin Database API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/spanner_admin_database/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "grpcio >= 1.33.2, < 2.0.0", - "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-database" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
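The version extraction above reads gapic_version.py with a lookaround regex; note the dots in \d+.\d+.\d+ are unescaped, so the pattern is slightly looser than strict semver, though the quote anchors keep it working in practice. A standalone check of the stricter, escaped form against a line mirroring that file's content:

    import re

    # Mirrors the `__version__ = "0.1.0"` line in gapic_version.py.
    sample = '__version__ = "0.1.0"  # {x-release-please-version}'
    candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", sample)
    assert candidates == ["0.1.0"]
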
-google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt deleted file mode 100644 index 2ae5a677e8..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,13 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt deleted file mode 100644 index 5b1ee6c35a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,13 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
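Taken together, the constraints files bracket the ranges declared in setup.py: constraints-3.7.txt pins every dependency to its exact lower bound, the newest file floors each at its latest major version, and the in-between files leave versions unpinned. A hedged sketch of the lower-bound check the 3.7 file effectively performs, using the pins listed above (the packaging library is assumed installed):

    from importlib import metadata

    from packaging.version import Version

    LOWER_BOUNDS = {
        "google-api-core": "1.34.1",
        "google-auth": "2.14.1",
        "proto-plus": "1.22.3",
        "protobuf": "3.20.2",
        "grpc-google-iam-v1": "0.14.0",
    }

    for dist, floor in LOWER_BOUNDS.items():
        # Fails if anything installed drops below setup.py's stated minimum.
        assert Version(metadata.version(dist)) >= Version(floor), dist
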
-# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py deleted file mode 100644 index 0ba71c42e0..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ /dev/null @@ -1,22226 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import re -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminAsyncClient -from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminClient -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.services.database_admin import transports -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import common -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return 
b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DatabaseAdminClient._get_default_mtls_endpoint(None) is None - assert DatabaseAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DatabaseAdminClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DatabaseAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DatabaseAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be 
-
-    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
-        assert DatabaseAdminClient._read_environment_variables() == (False, "auto", "foo.com")
-
-def test__get_client_cert_source():
-    mock_provided_cert_source = mock.Mock()
-    mock_default_cert_source = mock.Mock()
-
-    assert DatabaseAdminClient._get_client_cert_source(None, False) is None
-    assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None
-    assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
-
-    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
-            assert DatabaseAdminClient._get_client_cert_source(None, True) is mock_default_cert_source
-            assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
-
-@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient))
-@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient))
-def test__get_api_endpoint():
-    api_override = "foo.com"
-    mock_client_cert_source = mock.Mock()
-    default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE
-    default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
-    mock_universe = "bar.com"
-    mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
-    assert DatabaseAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
-    assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT
-    assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
-    assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT
-    assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT
-    assert DatabaseAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
-    assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
-
-    with pytest.raises(MutualTLSChannelError) as excinfo:
-        DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
-    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
-
-
-def test__get_universe_domain():
-    client_universe_domain = "foo.com"
-    universe_domain_env = "bar.com"
-
-    assert DatabaseAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
-    assert DatabaseAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
-    assert DatabaseAdminClient._get_universe_domain(None, None) == DatabaseAdminClient._DEFAULT_UNIVERSE
-
-    with pytest.raises(ValueError) as excinfo:
-        DatabaseAdminClient._get_universe_domain("", None)
-    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
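-
-# For orientation: the triple asserted above is
-# (use_client_cert, mtls_endpoint_mode, universe_domain).  A rough sketch of
-# the parsing these tests pin down (hypothetical helper, not the client's
-# actual implementation):
-#
-#     def read_env():
-#         use_cert = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-#         if use_cert not in ("true", "false"):
-#             raise ValueError("... must be either `true` or `false`")
-#         mode = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-#         if mode not in ("never", "auto", "always"):
-#             raise MutualTLSChannelError("... must be `never`, `auto` or `always`")
-#         return use_cert == "true", mode, os.environ.get("GOOGLE_CLOUD_UNIVERSE_DOMAIN")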
-
-@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [
-    (401, CRED_INFO_JSON, True),
-    (403, CRED_INFO_JSON, True),
-    (404, CRED_INFO_JSON, True),
-    (500, CRED_INFO_JSON, False),
-    (401, None, False),
-    (403, None, False),
-    (404, None, False),
-    (500, None, False)
-])
-def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
-    cred = mock.Mock(["get_cred_info"])
-    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
-    client = DatabaseAdminClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    if show_cred_info:
-        assert error.details == ["foo", CRED_INFO_STRING]
-    else:
-        assert error.details == ["foo"]
-
-@pytest.mark.parametrize("error_code", [401,403,404,500])
-def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
-    cred = mock.Mock([])
-    assert not hasattr(cred, "get_cred_info")
-    client = DatabaseAdminClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=[])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    assert error.details == []
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (DatabaseAdminClient, "grpc"),
-    (DatabaseAdminAsyncClient, "grpc_asyncio"),
-    (DatabaseAdminClient, "rest"),
-])
-def test_database_admin_client_from_service_account_info(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
-        factory.return_value = creds
-        info = {"valid": True}
-        client = client_class.from_service_account_info(info, transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'spanner.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://spanner.googleapis.com'
-        )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
-    (transports.DatabaseAdminGrpcTransport, "grpc"),
-    (transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"),
-    (transports.DatabaseAdminRestTransport, "rest"),
-])
-def test_database_admin_client_service_account_always_use_jwt(transport_class, transport_name):
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=True)
-        use_jwt.assert_called_once_with(True)
-
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=False)
-        use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (DatabaseAdminClient, "grpc"),
-    (DatabaseAdminAsyncClient, "grpc_asyncio"),
-    (DatabaseAdminClient, "rest"),
-])
-def test_database_admin_client_from_service_account_file(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
-        factory.return_value = creds
-        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'spanner.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://spanner.googleapis.com'
-        )
-
-
-def test_database_admin_client_get_transport_class():
-    transport = DatabaseAdminClient.get_transport_class()
-    available_transports = [
-        transports.DatabaseAdminGrpcTransport,
-        transports.DatabaseAdminRestTransport,
-    ]
-    assert transport in available_transports
-
-    transport = DatabaseAdminClient.get_transport_class("grpc")
-    assert transport == transports.DatabaseAdminGrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"),
-    (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"),
-    (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"),
-])
-@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient))
-@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient))
-def test_database_admin_client_client_options(client_class, transport_class, transport_name):
-    # Check that if channel is provided we won't create a new one.
-    with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc:
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-        client = client_class(transport=transport)
-        gtc.assert_not_called()
-
-    # Check that if channel is provided via str we will create a new one.
-    with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc:
-        client = client_class(transport=transport_name)
-        gtc.assert_called()
-
-    # Check the case api_endpoint is provided.
-    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "true"),
-    (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "false"),
-    (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "true"),
-    (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "false"),
-])
-@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient))
-@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_database_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(client_options=options, transport=transport_name)
-
-            if use_client_cert_env == "false":
-                expected_client_cert_source = None
-                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-            else:
-                expected_client_cert_source = client_cert_source_callback
-                expected_host = client.DEFAULT_MTLS_ENDPOINT
-
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=expected_host,
-                scopes=None,
-                client_cert_source_for_mtls=expected_client_cert_source,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case ADC client cert is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
-                    if use_client_cert_env == "false":
-                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-                        expected_client_cert_source = None
-                    else:
-                        expected_host = client.DEFAULT_MTLS_ENDPOINT
-                        expected_client_cert_source = client_cert_source_callback
-
-                    patched.return_value = None
-                    client = client_class(transport=transport_name)
-                    patched.assert_called_once_with(
-                        credentials=None,
-                        credentials_file=None,
-                        host=expected_host,
-                        scopes=None,
-                        client_cert_source_for_mtls=expected_client_cert_source,
-                        quota_project_id=None,
-                        client_info=transports.base.DEFAULT_CLIENT_INFO,
-                        always_use_jwt_access=True,
-                        api_audience=None,
-                    )
-
-    # Check the case client_cert_source and ADC client cert are not provided.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
-                patched.return_value = None
-                client = client_class(transport=transport_name)
-                patched.assert_called_once_with(
-                    credentials=None,
-                    credentials_file=None,
-                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                    scopes=None,
-                    client_cert_source_for_mtls=None,
-                    quota_project_id=None,
-                    client_info=transports.base.DEFAULT_CLIENT_INFO,
-                    always_use_jwt_access=True,
-                    api_audience=None,
-                )
-
-
-@pytest.mark.parametrize("client_class", [
-    DatabaseAdminClient, DatabaseAdminAsyncClient
-])
-@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient))
-@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient))
-def test_database_admin_client_get_mtls_endpoint_and_cert_source(client_class):
-    mock_client_cert_source = mock.Mock()
-
-    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        mock_api_endpoint = "foo"
-        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
-        assert api_endpoint == mock_api_endpoint
-        assert cert_source == mock_client_cert_source
-
-    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
-        mock_client_cert_source = mock.Mock()
-        mock_api_endpoint = "foo"
-        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
-        assert api_endpoint == mock_api_endpoint
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-        assert api_endpoint == client_class.DEFAULT_ENDPOINT
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
-            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-            assert api_endpoint == client_class.DEFAULT_ENDPOINT
-            assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
-                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-                assert cert_source == mock_client_cert_source
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client_class.get_mtls_endpoint_and_cert_source()
-
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client_class.get_mtls_endpoint_and_cert_source()
-
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
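-
-# The endpoint selection exercised above can be summarized as follows (a
-# reading of the assertions, assuming the private helpers keep their current
-# semantics):
-#
-#     mode      client cert present   resulting endpoint
-#     "never"   (ignored)             _DEFAULT_ENDPOINT_TEMPLATE % universe
-#     "always"  (ignored)             DEFAULT_MTLS_ENDPOINT
-#     "auto"    yes                   DEFAULT_MTLS_ENDPOINT
-#     "auto"    no                    _DEFAULT_ENDPOINT_TEMPLATE % universe
-#
-# and mTLS is rejected for any universe other than googleapis.com.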
-
-@pytest.mark.parametrize("client_class", [
-    DatabaseAdminClient, DatabaseAdminAsyncClient
-])
-@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient))
-@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient))
-def test_database_admin_client_client_api_endpoint(client_class):
-    mock_client_cert_source = client_cert_source_callback
-    api_override = "foo.com"
-    default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE
-    default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
-    mock_universe = "bar.com"
-    mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
-    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
-    # use ClientOptions.api_endpoint as the api endpoint regardless.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
-            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
-            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-            assert client.api_endpoint == api_override
-
-    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        client = client_class(credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == default_endpoint
-
-    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
-    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        client = client_class(credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-
-    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
-    # and ClientOptions.universe_domain="bar.com",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
-    options = client_options.ClientOptions()
-    universe_exists = hasattr(options, "universe_domain")
-    if universe_exists:
-        options = client_options.ClientOptions(universe_domain=mock_universe)
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-    else:
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
-    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
-
-    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
-    options = client_options.ClientOptions()
-    if hasattr(options, "universe_domain"):
-        delattr(options, "universe_domain")
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == default_endpoint
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"),
-    (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"),
-    (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"),
-])
-def test_database_admin_client_client_options_scopes(client_class, transport_class, transport_name):
-    # Check the case scopes are provided.
-    options = client_options.ClientOptions(
-        scopes=["1", "2"],
-    )
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=["1", "2"],
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
-    (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers),
-    (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
-    (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", None),
-])
-def test_database_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
-    # Check the case credentials file is provided.
-    options = client_options.ClientOptions(
-        credentials_file="credentials.json"
-    )
-
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file="credentials.json",
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-def test_database_admin_client_client_options_from_dict():
-    with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminGrpcTransport.__init__') as grpc_transport:
-        grpc_transport.return_value = None
-        client = DatabaseAdminClient(
-            client_options={'api_endpoint': 'squid.clam.whelk'}
-        )
-        grpc_transport.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
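-
-# As the test above relies on, `client_options` accepts either a
-# `client_options.ClientOptions` instance or a plain dict with the same keys.
-# Usage sketch (the endpoint value is illustrative):
-#
-#     client = DatabaseAdminClient(client_options={'api_endpoint': 'squid.clam.whelk'})
-#     # is equivalent to
-#     client = DatabaseAdminClient(
-#         client_options=client_options.ClientOptions(api_endpoint='squid.clam.whelk'))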
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
-    (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers),
-    (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
-])
-def test_database_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
-    # Check the case credentials file is provided.
-    options = client_options.ClientOptions(
-        credentials_file="credentials.json"
-    )
-
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file="credentials.json",
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # test that the credentials from file are saved and used as the credentials.
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "spanner.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-                'https://www.googleapis.com/auth/spanner.admin',
-            ),
-            scopes=None,
-            default_host="spanner.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_database_admin.ListDatabasesRequest,
-    dict,
-])
-def test_list_databases(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.ListDatabasesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.ListDatabasesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDatabasesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_databases_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_database_admin.ListDatabasesRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.list_databases(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner_database_admin.ListDatabasesRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-        )
-
-def test_list_databases_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DatabaseAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_databases in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc
-        request = {}
-        client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_databases(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_databases_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_databases in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_databases] = mock_rpc
-
-        request = {}
-        await client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_databases(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabasesRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.ListDatabasesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDatabasesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_databases_async_from_dict():
-    await test_list_databases_async(request_type=dict)
-
-def test_list_databases_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.ListDatabasesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        call.return_value = spanner_database_admin.ListDatabasesResponse()
-        client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_databases_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.ListDatabasesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse())
-        await client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_databases_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.ListDatabasesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_databases(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
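-
-# The field-header tests above pin down implicit routing metadata: for a
-# request with `parent` set, the client is expected to attach
-# ('x-goog-request-params', 'parent=parent_value') to the call metadata so
-# the backend can route the request.  The check, restated (names as above):
-#
-#     _, _, kw = call.mock_calls[0]
-#     assert ('x-goog-request-params', 'parent=parent_value') in kw['metadata']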
-
-
-def test_list_databases_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_databases(
-            spanner_database_admin.ListDatabasesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_databases_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_databases(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_databases_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_databases(
-            spanner_database_admin.ListDatabasesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_databases_pager(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_databases(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner_database_admin.Database)
-                   for i in results)
-
-def test_list_databases_pages(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_databases(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_databases_async_pager():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_databases(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:  # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, spanner_database_admin.Database)
-                   for i in responses)
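-
-# For context: `list_databases` returns a pager that fetches pages lazily.
-# Iterating the pager yields `Database` items across page boundaries, while
-# `.pages` exposes whole responses (the async pager supports `async for`).
-# Usage sketch (the resource name is illustrative):
-#
-#     pager = client.list_databases(parent="projects/my-project/instances/my-instance")
-#     for database in pager:          # items; page boundaries are transparent
-#         print(database.name)
-#     pager = client.list_databases(parent="projects/my-project/instances/my-instance")
-#     for page in pager.pages:        # or walk raw pages explicitly
-#         print(page.next_page_token)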
-
-
-@pytest.mark.asyncio
-async def test_list_databases_async_pages():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in (  # pragma: no branch
-            await client.list_databases(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
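-
-# The create/restore-style tests that follow exercise long-running
-# operations: the RPC returns an `operations_pb2.Operation`, which the client
-# wraps in a `google.api_core.operation.Operation` future.  Usage sketch
-# (resource names and DDL are illustrative):
-#
-#     op = client.create_database(
-#         parent="projects/my-project/instances/my-instance",
-#         create_statement="CREATE DATABASE `my-database`",
-#     )
-#     database = op.result()  # blocks until the LRO completes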
-
-@pytest.mark.parametrize("request_type", [
-    spanner_database_admin.CreateDatabaseRequest,
-    dict,
-])
-def test_create_database(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.CreateDatabaseRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_database_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_database_admin.CreateDatabaseRequest(
-        parent='parent_value',
-        create_statement='create_statement_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_database),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.create_database(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner_database_admin.CreateDatabaseRequest(
-            parent='parent_value',
-            create_statement='create_statement_value',
-        )
-
-def test_create_database_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DatabaseAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_database in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_database] = mock_rpc
-        request = {}
-        client.create_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_database in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_database] = mock_rpc
-
-        request = {}
-        await client.create_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.create_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.CreateDatabaseRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.create_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.CreateDatabaseRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_create_database_async_from_dict():
-    await test_create_database_async(request_type=dict)
-
-def test_create_database_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.CreateDatabaseRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_database),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.create_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_database_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.CreateDatabaseRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_database),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.create_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_database_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_database(
-            parent='parent_value',
-            create_statement='create_statement_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].create_statement
-        mock_val = 'create_statement_value'
-        assert arg == mock_val
-
-
-def test_create_database_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_database(
-            spanner_database_admin.CreateDatabaseRequest(),
-            parent='parent_value',
-            create_statement='create_statement_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_database_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_database(
-            parent='parent_value',
-            create_statement='create_statement_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].create_statement
-        mock_val = 'create_statement_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_database_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_database(
-            spanner_database_admin.CreateDatabaseRequest(),
-            parent='parent_value',
-            create_statement='create_statement_value',
-        )
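-
-# A recurring invariant above: convenience ("flattened") parameters and an
-# explicit request object are mutually exclusive.  Restated as a sketch
-# (argument values as in the tests):
-#
-#     client.create_database(parent=..., create_statement=...)   # ok
-#     client.create_database(request)                            # ok
-#     client.create_database(request, parent=...)                # raises ValueError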
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.enable_drop_protection is True - assert response.reconciling is True - - -def test_get_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.GetDatabaseRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseRequest( - name='name_value', - ) - -def test_get_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc - request = {} - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_database] = mock_rpc - - request = {} - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - enable_drop_protection=True, - reconciling=True, - )) - response = await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
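Both use_cached_wrapped_rpc tests above pin down the same behaviour: method wrappers are created once at client construction by _prep_wrapped_messages and reused from a dict keyed by the raw stub method, so wrap_method is not re-invoked per call. A rough sketch of that caching pattern under those assumptions (wrap_method here is a stand-in for google.api_core.gapic_v1.method.wrap_method):

    def wrap_method(func):
        # The real wrapper layers retry, timeout and metadata handling.
        def wrapped(request, **kwargs):
            return func(request, **kwargs)
        return wrapped

    class Transport:
        def __init__(self, stub_methods):
            # Built once; each later invocation is only a dict lookup.
            self._wrapped_methods = {m: wrap_method(m) for m in stub_methods}

    def get_database(request):
        return dict(request)

    transport = Transport([get_database])
    rpc = transport._wrapped_methods[get_database]
    assert rpc({'name': 'name_value'}) == {'name': 'name_value'}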
- assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.enable_drop_protection is True - assert response.reconciling is True - - -@pytest.mark.asyncio -async def test_get_database_async_from_dict(): - await test_get_database_async(request_type=dict) - -def test_get_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = spanner_database_admin.Database() - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.UpdateDatabaseRequest, - dict, -]) -def test_update_database(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.UpdateDatabaseRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.UpdateDatabaseRequest( - ) - -def test_update_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_database] = mock_rpc - - request = {} - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_database_async_from_dict(): - await test_update_database_async(request_type=dict) - -def test_update_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -def test_update_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database( - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
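The field-header tests, sync and async alike, assert a single mechanism: any request field that forms part of the HTTP/1.1 URI is mirrored into the x-goog-request-params metadata entry so intermediaries can route the call without parsing the body. A sketch of how such an entry can be assembled (to_routing_header is a hypothetical helper, not necessarily the library's API):

    from urllib.parse import quote

    def to_routing_header(params):
        # {'database.name': 'name_value'}
        #   -> ('x-goog-request-params', 'database.name=name_value')
        value = '&'.join(
            f'{key}={quote(str(val), safe="")}'
            for key, val in sorted(params.items())
        )
        return ('x-goog-request-params', value)

    assert to_routing_header({'database.name': 'name_value'}) == (
        'x-goog-request-params',
        'database.name=name_value',
    )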
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = spanner_database_admin.Database(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database( - spanner_database_admin.UpdateDatabaseRequest(), - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_database( - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = spanner_database_admin.Database(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_database( - spanner_database_admin.UpdateDatabaseRequest(), - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.UpdateDatabaseDdlRequest, - dict, -]) -def test_update_database_ddl(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseDdlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_database_ddl_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.UpdateDatabaseDdlRequest( - database='database_value', - operation_id='operation_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_database_ddl(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest( - database='database_value', - operation_id='operation_id_value', - ) - -def test_update_database_ddl_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database_ddl in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc - request = {} - client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_database_ddl in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_database_ddl] = mock_rpc - - request = {} - await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseDdlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_database_ddl_async_from_dict(): - await test_update_database_ddl_async(request_type=dict) - -def test_update_database_ddl_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseDdlRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
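Where a test asserts isinstance(response, future.Future), it is checking that the client does not hand back the raw operations_pb2.Operation but wraps it in a polling future for the long-running operation. A compressed sketch of that shape (OperationFuture is an illustrative stand-in for google.api_core.operation.Operation):

    import concurrent.futures

    class OperationFuture(concurrent.futures.Future):
        """Future-like view over a long-running operation proto."""
        def __init__(self, operation_name):
            super().__init__()
            # e.g. 'operations/spam'; the real class polls this name
            # until the server marks the operation done.
            self.operation_name = operation_name

    def update_database(raw_operation_name):
        return OperationFuture(raw_operation_name)

    response = update_database('operations/spam')
    assert isinstance(response, concurrent.futures.Future)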
- with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_ddl_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseDdlRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_update_database_ddl_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database_ddl( - database='database_value', - statements=['statements_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].statements - mock_val = ['statements_value'] - assert arg == mock_val - - -def test_update_database_ddl_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database='database_value', - statements=['statements_value'], - ) - -@pytest.mark.asyncio -async def test_update_database_ddl_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. 
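Each *_flattened_error test encodes the same client-side contract: a caller supplies either a complete request object or individual keyword fields, never both. A sketch of the guard under that assumption (the signature is illustrative):

    def update_database_ddl(request=None, *, database=None, statements=None):
        has_flattened = any(v is not None for v in (database, statements))
        if request is not None and has_flattened:
            raise ValueError(
                'If the `request` argument is set, then none of '
                'the individual field arguments should be set.'
            )
        # ...otherwise merge the fields into a request and invoke the RPC.

    try:
        update_database_ddl({'database': 'database_value'}, database='database_value')
    except ValueError:
        pass  # both forms at once is rejected, as the tests expect
    else:
        raise AssertionError('expected ValueError')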
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_database_ddl( - database='database_value', - statements=['statements_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].statements - mock_val = ['statements_value'] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_database_ddl_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database='database_value', - statements=['statements_value'], - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.DropDatabaseRequest, - dict, -]) -def test_drop_database(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.DropDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_drop_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.DropDatabaseRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.drop_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.DropDatabaseRequest( - database='database_value', - ) - -def test_drop_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.drop_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc - request = {} - client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.drop_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_drop_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.drop_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.drop_database] = mock_rpc - - request = {} - await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.drop_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_drop_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.DropDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. 
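The async drop_database tests stub the call with grpc_helpers_async.FakeUnaryUnaryCall(None): an object that can be awaited and then resolves to the canned response (here None, since the RPC returns Empty). A minimal self-contained sketch of that awaitable-fake idea (FakeCall and demo are illustrative stand-ins, not the library's classes):

    import asyncio
    from unittest import mock

    class FakeCall:
        """Awaitable stand-in for a gRPC unary-unary call object."""
        def __init__(self, response):
            self._response = response

        def __await__(self):
            # Yield to the event loop once, then produce the canned
            # response, just as awaiting a live call would.
            yield
            return self._response

    async def demo():
        stub = mock.Mock(return_value=FakeCall(None))
        response = await stub(request={})  # client code awaits the call
        assert response is None
        assert stub.call_count == 1

    asyncio.run(demo())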
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.DropDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_drop_database_async_from_dict(): - await test_drop_database_async(request_type=dict) - -def test_drop_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.DropDatabaseRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = None - client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_drop_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.DropDatabaseRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_drop_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.drop_database( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - - -def test_drop_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.drop_database( - spanner_database_admin.DropDatabaseRequest(), - database='database_value', - ) - -@pytest.mark.asyncio -async def test_drop_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.drop_database( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_drop_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.drop_database( - spanner_database_admin.DropDatabaseRequest(), - database='database_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.GetDatabaseDdlRequest, - dict, -]) -def test_get_database_ddl(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - proto_descriptors=b'proto_descriptors_blob', - ) - response = client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.GetDatabaseDdlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) - assert response.statements == ['statements_value'] - assert response.proto_descriptors == b'proto_descriptors_blob' - - -def test_get_database_ddl_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.GetDatabaseDdlRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_database_ddl(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseDdlRequest( - database='database_value', - ) - -def test_get_database_ddl_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database_ddl in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc - request = {} - client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_database_ddl in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_database_ddl] = mock_rpc - - request = {} - await client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseDdlRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - proto_descriptors=b'proto_descriptors_blob', - )) - response = await client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.GetDatabaseDdlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) - assert response.statements == ['statements_value'] - assert response.proto_descriptors == b'proto_descriptors_blob' - - -@pytest.mark.asyncio -async def test_get_database_ddl_async_from_dict(): - await test_get_database_ddl_async(request_type=dict) - -def test_get_database_ddl_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseDdlRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_ddl_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseDdlRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse()) - await client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_get_database_ddl_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database_ddl( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - - -def test_get_database_ddl_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_database_ddl( - spanner_database_admin.GetDatabaseDdlRequest(), - database='database_value', - ) - -@pytest.mark.asyncio -async def test_get_database_ddl_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database_ddl( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_database_ddl_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_database_ddl( - spanner_database_admin.GetDatabaseDdlRequest(), - database='database_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.set_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - -def test_set_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc - - request = {} - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.SetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_from_dict():
- await test_set_iam_policy_async(request_type=dict)
-
-def test_set_iam_policy_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- call.return_value = policy_pb2.Policy()
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
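- # (kw['metadata'] is the sequence of (key, value) tuples handed to gRPC;
- # a hedged sketch of how the routing header is assembled from the
- # request before the stub is invoked:
- #   params = ['resource={}'.format(request.resource)]
- #   metadata = [('x-goog-request-params', '&'.join(params))]
- # with values URL-encoded by the real routing helper.)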
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-def test_set_iam_policy_from_dict_foreign():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy()
- response = client.set_iam_policy(request={
- 'resource': 'resource_value',
- 'policy': policy_pb2.Policy(version=774),
- 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']),
- }
- )
- call.assert_called()
-
-
-def test_set_iam_policy_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.set_iam_policy(
- resource='resource_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
-
-
-def test_set_iam_policy_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.set_iam_policy(
- iam_policy_pb2.SetIamPolicyRequest(),
- resource='resource_value',
- )
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.set_iam_policy(
- resource='resource_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
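- # (a hedged sketch of the guard inside the generated method body that
- # this exercises:
- #   has_flattened_params = any([resource])
- #   if request is not None and has_flattened_params:
- #       raise ValueError('If the `request` argument is set, then none of '
- #                        'the individual field arguments should be set.')
- # )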
- with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
-
- request = {}
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.GetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
- await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = policy_pb2.Policy()
- client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-def test_get_iam_policy_from_dict_foreign():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy()
- response = client.get_iam_policy(request={
- 'resource': 'resource_value',
- 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
- }
- )
- call.assert_called()
-
-
-def test_get_iam_policy_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_iam_policy(
- resource='resource_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
-
-
-def test_get_iam_policy_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_iam_policy(
- iam_policy_pb2.GetIamPolicyRequest(),
- resource='resource_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
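- # (setting .name on the mock's return value is a cross-API safety net;
- # a hedged sketch of the compute-style check it satisfies, which this
- # service does not actually perform:
- #   operation = wrapped_rpc(request)
- #   assert isinstance(operation.name, str)   # "foo" rather than a Mock
- # )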
- client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc - - request = {} - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- ))
- response = await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
- await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
- await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-def test_test_iam_permissions_from_dict_foreign():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- response = client.test_iam_permissions(request={
- 'resource': 'resource_value',
- 'permissions': ['permissions_value'],
- }
- )
- call.assert_called()
-
-
-def test_test_iam_permissions_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.test_iam_permissions(
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
- arg = args[0].permissions
- mock_val = ['permissions_value']
- assert arg == mock_val
-
-
-def test_test_iam_permissions_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.test_iam_permissions(
- iam_policy_pb2.TestIamPermissionsRequest(),
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.test_iam_permissions(
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
- arg = args[0].permissions
- mock_val = ['permissions_value']
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.test_iam_permissions(
- iam_policy_pb2.TestIamPermissionsRequest(),
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
-
-@pytest.mark.parametrize("request_type", [
- gsad_backup.CreateBackupRequest,
- dict,
-])
-def test_create_backup(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gsad_backup.CreateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup.CreateBackupRequest( - parent='parent_value', - backup_id='backup_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.CreateBackupRequest( - parent='parent_value', - backup_id='backup_id_value', - ) - -def test_create_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc - request = {} - client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
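- # (hedged sketch of that lazy property on the gRPC transport:
- #   @property
- #   def operations_client(self):
- #       if self._operations_client is None:
- #           self._operations_client = operations_v1.OperationsClient(
- #               self.grpc_channel)
- #       return self._operations_client
- # which is why wrapper_fn fires once more on the first LRO call.)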
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_backup] = mock_rpc - - request = {} - await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.CreateBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gsad_backup.CreateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_backup_async_from_dict(): - await test_create_backup_async(request_type=dict) - -def test_create_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.CreateBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.CreateBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_backup( - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].backup - mock_val = gsad_backup.Backup(database='database_value') - assert arg == mock_val - arg = args[0].backup_id - mock_val = 'backup_id_value' - assert arg == mock_val - - -def test_create_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_backup( - gsad_backup.CreateBackupRequest(), - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - -@pytest.mark.asyncio -async def test_create_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_backup(
- parent='parent_value',
- backup=gsad_backup.Backup(database='database_value'),
- backup_id='backup_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup
- mock_val = gsad_backup.Backup(database='database_value')
- assert arg == mock_val
- arg = args[0].backup_id
- mock_val = 'backup_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_backup_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_backup(
- gsad_backup.CreateBackupRequest(),
- parent='parent_value',
- backup=gsad_backup.Backup(database='database_value'),
- backup_id='backup_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backup.CopyBackupRequest,
- dict,
-])
-def test_copy_backup(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.copy_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.copy_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = backup.CopyBackupRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_copy_backup_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = backup.CopyBackupRequest(
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.copy_backup),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.copy_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.CopyBackupRequest( - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - ) - -def test_copy_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.copy_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc - request = {} - client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.copy_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_copy_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.copy_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.copy_backup] = mock_rpc - - request = {} - await client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.copy_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_copy_backup_async(transport: str = 'grpc_asyncio', request_type=backup.CopyBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
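- # (on the async surface the stub must return an awaitable, hence the
- # FakeUnaryUnaryCall wrapper below; a hedged sketch of the call path it
- # imitates:
- #   call = stub(request)       # grpc.aio unary-unary call object
- #   operation = await call     # resolves to operations_pb2.Operation
- # which the client then turns into an async LRO future.)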
- with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backup.CopyBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_copy_backup_async_from_dict(): - await test_copy_backup_async(request_type=dict) - -def test_copy_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.CopyBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_copy_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.CopyBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_copy_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.copy_backup( - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - expire_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_id
- mock_val = 'backup_id_value'
- assert arg == mock_val
- arg = args[0].source_backup
- mock_val = 'source_backup_value'
- assert arg == mock_val
- assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
-
-
-def test_copy_backup_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.copy_backup(
- backup.CopyBackupRequest(),
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- expire_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
-@pytest.mark.asyncio
-async def test_copy_backup_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.copy_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.copy_backup(
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- expire_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_id
- mock_val = 'backup_id_value'
- assert arg == mock_val
- arg = args[0].source_backup
- mock_val = 'source_backup_value'
- assert arg == mock_val
- assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
-
-@pytest.mark.asyncio
-async def test_copy_backup_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.copy_backup(
- backup.CopyBackupRequest(),
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- expire_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backup.GetBackupRequest,
- dict,
-])
-def test_get_backup(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -def test_get_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.GetBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.GetBackupRequest( - name='name_value', - ) - -def test_get_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_backup in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_backup] = mock_rpc
-
- request = {}
- await client.get_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=backup.GetBackupRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup(
- database='database_value',
- name='name_value',
- size_bytes=1089,
- freeable_size_bytes=2006,
- exclusive_size_bytes=2168,
- state=backup.Backup.State.CREATING,
- referencing_databases=['referencing_databases_value'],
- database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
- referencing_backups=['referencing_backups_value'],
- backup_schedules=['backup_schedules_value'],
- incremental_backup_chain_id='incremental_backup_chain_id_value',
- ))
- response = await client.get_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backup.GetBackupRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
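- # (backup.Backup is a proto-plus wrapper, so scalar and repeated fields
- # compare directly against plain Python values; a hedged example:
- #   assert backup.Backup(size_bytes=1089).size_bytes == 1089
- #   assert backup.Backup(backup_schedules=['s']).backup_schedules == ['s']
- # )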
- assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) - -def test_get_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.GetBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backup.Backup() - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.GetBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
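# The flattened-call contract asserted below: each keyword argument is folded into
# one field of a request proto before the transport is invoked, roughly (a sketch,
# not the generated client's exact code):
#
#     request = backup.GetBackupRequest()
#     request.name = name    # the flattened `name=` kwarg becomes request.name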
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup( - backup.GetBackupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup( - backup.GetBackupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup.UpdateBackupRequest, - dict, -]) -def test_update_backup(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - ) - response = client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gsad_backup.UpdateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -def test_update_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup.UpdateBackupRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.UpdateBackupRequest( - ) - -def test_update_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc - request = {} - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_backup] = mock_rpc - - request = {} - await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.UpdateBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - )) - response = await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gsad_backup.UpdateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) - -def test_update_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.UpdateBackupRequest() - - request.backup.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = gsad_backup.Backup() - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.UpdateBackupRequest() - - request.backup.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) - await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=name_value', - ) in kw['metadata'] - - -def test_update_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_backup( - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = gsad_backup.Backup(database='database_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup( - gsad_backup.UpdateBackupRequest(), - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_backup( - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = gsad_backup.Backup(database='database_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_backup_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_backup( - gsad_backup.UpdateBackupRequest(), - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - backup.DeleteBackupRequest, - dict, -]) -def test_delete_backup(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
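# DeleteBackup returns google.protobuf.Empty on the wire, which the generated Python
# surface maps to None; hence the canned return_value of None above and the
# `response is None` assertion that follows. A hypothetical call, for illustration:
#
#     client.delete_backup(name="projects/p/instances/i/backups/b")   # returns None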
- assert response is None - - -def test_delete_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.DeleteBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.DeleteBackupRequest( - name='name_value', - ) - -def test_delete_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup] = mock_rpc - - request = {} - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=backup.DeleteBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backup.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) - -def test_delete_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = None - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - backup.DeleteBackupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_backup( - backup.DeleteBackupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backup.ListBackupsRequest, - dict, -]) -def test_list_backups(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_backups_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests.
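# AIP-4235 auto-population in miniature: when a request field is annotated as an
# auto-populated UUID4 and the caller leaves it empty, the client fills it before
# sending. A hedged sketch with a hypothetical `request_id` field:
#
#     import uuid
#     if not request.request_id:                    # only if the caller left it unset
#         request.request_id = str(uuid.uuid4())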
- client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.ListBackupsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_backups(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_backups_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc - request = {} - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_backups in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_backups] = mock_rpc - - request = {} - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupsRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backup.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) - -def test_list_backups_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.ListBackupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = backup.ListBackupsResponse() - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_backups_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.ListBackupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse()) - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
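# The routing header checked next is assembled from request fields, approximately
# (a sketch of the x-goog-request-params convention, not the generator's exact code):
#
#     params = "parent=" + request.parent
#     metadata = (("x-goog-request-params", params),)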
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_backups_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_backups_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backups( - backup.ListBackupsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_backups_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backups( - backup.ListBackupsRequest(), - parent='parent_value', - ) - - -def test_list_backups_pager(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Set the response to a series of pages.
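# The pager below issues one RPC per page while next_page_token is non-empty, so four
# canned pages holding 3 + 0 + 1 + 2 backups yield six items in total. The
# side_effect mechanics, sketched with a hypothetical list of canned pages:
#
#     pages = iter(canned_pages)
#     def fake_rpc(request, **kwargs):
#         return next(pages)    # each underlying call pops the next canned response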
- call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backups(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup.Backup) - for i in results) -def test_list_backups_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backups_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backup.Backup) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backups_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.RestoreDatabaseRequest, - dict, -]) -def test_restore_database(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_restore_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.RestoreDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.restore_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.RestoreDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - -def test_restore_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc - request = {} - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.restore_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.restore_database] = mock_rpc - - request = {} - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.RestoreDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
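# RestoreDatabase is a long-running operation: the stub returns an
# operations_pb2.Operation and the client wraps it in an operation future, which is
# why the sync test above asserts isinstance(response, future.Future). Hypothetical
# async usage, for orientation:
#
#     op = await client.restore_database(request=request)
#     database = await op.result()    # resolves once the restore LRO finishes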
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_database_async_from_dict(): - await test_restore_database_async(request_type=dict) - -def test_restore_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.RestoreDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restore_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.RestoreDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_restore_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.restore_database( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - assert args[0].backup == 'backup_value' - - -def test_restore_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.restore_database( - spanner_database_admin.RestoreDatabaseRequest(), - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - -@pytest.mark.asyncio -async def test_restore_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.restore_database( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - assert args[0].backup == 'backup_value' - -@pytest.mark.asyncio -async def test_restore_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.restore_database( - spanner_database_admin.RestoreDatabaseRequest(), - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabaseOperationsRequest, - dict, -]) -def test_list_database_operations(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.ListDatabaseOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabaseOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_database_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.ListDatabaseOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_database_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_database_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc - request = {} - client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_database_operations(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_database_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_database_operations in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_database_operations] = mock_rpc
-
-        request = {}
-        await client.list_database_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_database_operations(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_database_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseOperationsRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_database_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.ListDatabaseOperationsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListDatabaseOperationsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_database_operations_async_from_dict():
-    await test_list_database_operations_async(request_type=dict)
-
-def test_list_database_operations_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.ListDatabaseOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_database_operations_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabaseOperationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) - await client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_database_operations_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_database_operations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_database_operations_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_database_operations( - spanner_database_admin.ListDatabaseOperationsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_database_operations_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_database_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_database_operations_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_database_operations(
-            spanner_database_admin.ListDatabaseOperationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_database_operations_pager(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_database_operations(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, operations_pb2.Operation)
-                   for i in results)
-def test_list_database_operations_pages(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
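-        # Note: the trailing RuntimeError below is a tripwire; the pager must
-        # stop at the page with an empty next_page_token, so it is never raised.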
- call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_database_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_database_operations_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_database_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_database_operations_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_database_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backup.ListBackupOperationsRequest, - dict, -]) -def test_list_backup_operations(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup.ListBackupOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_backup_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.ListBackupOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_backup_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_backup_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc - request = {} - client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backup_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backup_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_backup_operations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_backup_operations] = mock_rpc - - request = {} - await client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_backup_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backup_operations_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupOperationsRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_backup_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = backup.ListBackupOperationsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListBackupOperationsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_backup_operations_async_from_dict():
-    await test_list_backup_operations_async(request_type=dict)
-
-def test_list_backup_operations_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backup.ListBackupOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_operations),
-            '__call__') as call:
-        call.return_value = backup.ListBackupOperationsResponse()
-        client.list_backup_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_backup_operations_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backup.ListBackupOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_operations),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse())
-        await client.list_backup_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_backup_operations_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = backup.ListBackupOperationsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_backup_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_backup_operations_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_backup_operations(
-            backup.ListBackupOperationsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_backup_operations_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_backup_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_backup_operations_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_backup_operations(
-            backup.ListBackupOperationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_backup_operations_pager(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
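-        # Four pages (3 + 0 + 1 + 2 operations) are queued below; besides the
-        # page contents, the assertions later check that the pager records the
-        # implicit 'parent' routing header plus the explicit retry and timeout.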
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backup_operations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) -def test_list_backup_operations_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_operations_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_operations_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabaseRolesRequest, - dict, -]) -def test_list_database_roles(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseRolesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.ListDatabaseRolesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabaseRolesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_database_roles_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.ListDatabaseRolesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_database_roles(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabaseRolesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_database_roles_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_roles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc - request = {} - client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_database_roles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_database_roles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_database_roles in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_database_roles] = mock_rpc - - request = {} - await client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called. 
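-        # The awaited call should have been served by the AsyncMock installed
-        # above, not by a freshly wrapped method.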
-        assert mock_rpc.call_count == 1
-
-        await client.list_database_roles(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_database_roles_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseRolesRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_roles),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_database_roles(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.ListDatabaseRolesRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListDatabaseRolesAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_database_roles_async_from_dict():
-    await test_list_database_roles_async(request_type=dict)
-
-def test_list_database_roles_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.ListDatabaseRolesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_roles),
-            '__call__') as call:
-        call.return_value = spanner_database_admin.ListDatabaseRolesResponse()
-        client.list_database_roles(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_database_roles_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.ListDatabaseRolesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_roles),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse())
-        await client.list_database_roles(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_database_roles_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_roles),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.ListDatabaseRolesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_database_roles(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_database_roles_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_database_roles(
-            spanner_database_admin.ListDatabaseRolesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_database_roles_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_roles),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_database_roles(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_database_roles_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_database_roles(
-            spanner_database_admin.ListDatabaseRolesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_database_roles_pager(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_roles),
-            '__call__') as call:
-        # Set the response to a series of pages.
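-        # Four simulated pages carry 3, 0, 1 and 2 roles respectively, so the
-        # pager should yield all six DatabaseRole messages across page breaks.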
- call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_database_roles(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_database_admin.DatabaseRole) - for i in results) -def test_list_database_roles_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - pages = list(client.list_database_roles(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_database_roles_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_database_roles(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_database_admin.DatabaseRole) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_database_roles_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_database_roles(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.AddSplitPointsRequest, - dict, -]) -def test_add_split_points(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.AddSplitPointsResponse( - ) - response = client.add_split_points(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.AddSplitPointsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.AddSplitPointsResponse) - - -def test_add_split_points_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.AddSplitPointsRequest( - database='database_value', - initiator='initiator_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.add_split_points(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.AddSplitPointsRequest( - database='database_value', - initiator='initiator_value', - ) - -def test_add_split_points_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.add_split_points in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc - request = {} - client.add_split_points(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.add_split_points(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_add_split_points_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.add_split_points in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.add_split_points] = mock_rpc
-
-        request = {}
-        await client.add_split_points(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.add_split_points(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_add_split_points_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.AddSplitPointsRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.add_split_points),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse(
-        ))
-        response = await client.add_split_points(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.AddSplitPointsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, spanner_database_admin.AddSplitPointsResponse)
-
-
-@pytest.mark.asyncio
-async def test_add_split_points_async_from_dict():
-    await test_add_split_points_async(request_type=dict)
-
-def test_add_split_points_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.AddSplitPointsRequest()
-
-    request.database = 'database_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.add_split_points),
-            '__call__') as call:
-        call.return_value = spanner_database_admin.AddSplitPointsResponse()
-        client.add_split_points(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'database=database_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_add_split_points_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.AddSplitPointsRequest()
-
-    request.database = 'database_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.add_split_points),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse())
-        await client.add_split_points(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'database=database_value',
-        ) in kw['metadata']
-
-
-def test_add_split_points_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.add_split_points),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.AddSplitPointsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.add_split_points(
-            database='database_value',
-            split_points=[spanner_database_admin.SplitPoints(table='table_value')],
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-        arg = args[0].split_points
-        mock_val = [spanner_database_admin.SplitPoints(table='table_value')]
-        assert arg == mock_val
-
-
-def test_add_split_points_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.add_split_points(
-            spanner_database_admin.AddSplitPointsRequest(),
-            database='database_value',
-            split_points=[spanner_database_admin.SplitPoints(table='table_value')],
-        )
-
-@pytest.mark.asyncio
-async def test_add_split_points_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.add_split_points),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
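-        # Each keyword argument below should be copied into the matching field
-        # of the AddSplitPointsRequest that reaches the transport.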
- response = await client.add_split_points( - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].split_points - mock_val = [spanner_database_admin.SplitPoints(table='table_value')] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_add_split_points_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.add_split_points( - spanner_database_admin.AddSplitPointsRequest(), - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.CreateBackupScheduleRequest, - dict, -]) -def test_create_backup_schedule(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - response = client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gsad_backup_schedule.CreateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gsad_backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup_schedule.CreateBackupScheduleRequest( - parent='parent_value', - backup_schedule_id='backup_schedule_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup_schedule.CreateBackupScheduleRequest( - parent='parent_value', - backup_schedule_id='backup_schedule_id_value', - ) - -def test_create_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc - request = {} - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_backup_schedule in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_backup_schedule] = mock_rpc - - request = {} - await client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.CreateBackupScheduleRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
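- # The async transport yields awaitable call objects, so the designated
- # response is wrapped in a FakeUnaryUnaryCall that resolves to it when
- # awaited.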
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule(
- name='name_value',
- ))
- response = await client.create_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = gsad_backup_schedule.CreateBackupScheduleRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup_schedule.BackupSchedule)
- assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_create_backup_schedule_async_from_dict():
- await test_create_backup_schedule_async(request_type=dict)
-
-def test_create_backup_schedule_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.CreateBackupScheduleRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- call.return_value = gsad_backup_schedule.BackupSchedule()
- client.create_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.CreateBackupScheduleRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- await client.create_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gsad_backup_schedule.BackupSchedule()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_backup_schedule(
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].backup_schedule_id
- mock_val = 'backup_schedule_id_value'
- assert arg == mock_val
-
-
-def test_create_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_backup_schedule(
- gsad_backup_schedule.CreateBackupScheduleRequest(),
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_backup_schedule(
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].backup_schedule_id
- mock_val = 'backup_schedule_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_backup_schedule_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_backup_schedule(
- gsad_backup_schedule.CreateBackupScheduleRequest(),
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backup_schedule.GetBackupScheduleRequest,
- dict,
-])
-def test_get_backup_schedule(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup_schedule.BackupSchedule( - name='name_value', - ) - response = client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup_schedule.GetBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup_schedule.GetBackupScheduleRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup_schedule.GetBackupScheduleRequest( - name='name_value', - ) - -def test_get_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc - request = {} - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
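- # Both invocations below must flow through the wrapped method cached at
- # client creation: the stub call count advances while wrapper_fn is
- # never invoked again.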
- assert mock_rpc.call_count == 1
-
- client.get_backup_schedule(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_backup_schedule in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_backup_schedule] = mock_rpc
-
- request = {}
- await client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_backup_schedule(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.GetBackupScheduleRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule(
- name='name_value',
- ))
- response = await client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backup_schedule.GetBackupScheduleRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, backup_schedule.BackupSchedule)
- assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_async_from_dict():
- await test_get_backup_schedule_async(request_type=dict)
-
-def test_get_backup_schedule_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.GetBackupScheduleRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- call.return_value = backup_schedule.BackupSchedule()
- client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.GetBackupScheduleRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule())
- await client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup_schedule.BackupSchedule()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_backup_schedule(
- backup_schedule.GetBackupScheduleRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_backup_schedule_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup_schedule( - backup_schedule.GetBackupScheduleRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.UpdateBackupScheduleRequest, - dict, -]) -def test_update_backup_schedule(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - response = client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gsad_backup_schedule.UpdateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gsad_backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup_schedule.UpdateBackupScheduleRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup_schedule.UpdateBackupScheduleRequest( - ) - -def test_update_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc - request = {} - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_backup_schedule in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_backup_schedule] = mock_rpc - - request = {} - await client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule(
- name='name_value',
- ))
- response = await client.update_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = gsad_backup_schedule.UpdateBackupScheduleRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup_schedule.BackupSchedule)
- assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_async_from_dict():
- await test_update_backup_schedule_async(request_type=dict)
-
-def test_update_backup_schedule_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.UpdateBackupScheduleRequest()
-
- request.backup_schedule.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- call.return_value = gsad_backup_schedule.BackupSchedule()
- client.update_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'backup_schedule.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.UpdateBackupScheduleRequest()
-
- request.backup_schedule.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- await client.update_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'backup_schedule.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gsad_backup_schedule.BackupSchedule()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_backup_schedule(
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_backup_schedule(
- gsad_backup_schedule.UpdateBackupScheduleRequest(),
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_backup_schedule(
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_backup_schedule(
- gsad_backup_schedule.UpdateBackupScheduleRequest(),
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backup_schedule.DeleteBackupScheduleRequest,
- dict,
-])
-def test_delete_backup_schedule(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- response = client.delete_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
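- # DeleteBackupSchedule returns google.protobuf.Empty on the wire, which
- # the generated client surfaces to the caller as None.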
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup_schedule.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup_schedule.DeleteBackupScheduleRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup_schedule.DeleteBackupScheduleRequest( - name='name_value', - ) - -def test_delete_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc - request = {} - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup_schedule in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup_schedule] = mock_rpc - - request = {} - await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.DeleteBackupScheduleRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backup_schedule.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async_from_dict(): - await test_delete_backup_schedule_async(request_type=dict) - -def test_delete_backup_schedule_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup_schedule.DeleteBackupScheduleRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value = None - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
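- # Routing information travels as the x-goog-request-params gRPC
- # metadata entry, derived here from the request's `name` field.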
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.DeleteBackupScheduleRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_backup_schedule(
- backup_schedule.DeleteBackupScheduleRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_backup_schedule_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
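- # Passing both forms at once is ambiguous about which values should
- # win, so the client raises ValueError rather than merging them.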
- with pytest.raises(ValueError): - await client.delete_backup_schedule( - backup_schedule.DeleteBackupScheduleRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backup_schedule.ListBackupSchedulesRequest, - dict, -]) -def test_list_backup_schedules(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup_schedule.ListBackupSchedulesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup_schedule.ListBackupSchedulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupSchedulesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup_schedule.ListBackupSchedulesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_backup_schedules(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup_schedule.ListBackupSchedulesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_backup_schedules_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_schedules in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc - request = {} - client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_backup_schedules(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_backup_schedules in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_backup_schedules] = mock_rpc
-
- request = {}
- await client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_backup_schedules(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.ListBackupSchedulesRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backup_schedule.ListBackupSchedulesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListBackupSchedulesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_async_from_dict():
- await test_list_backup_schedules_async(request_type=dict)
-
-def test_list_backup_schedules_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.ListBackupSchedulesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- call.return_value = backup_schedule.ListBackupSchedulesResponse()
- client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.ListBackupSchedulesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
- await client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_backup_schedules_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup_schedule.ListBackupSchedulesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_backup_schedules(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_backup_schedules_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_backup_schedules(
- backup_schedule.ListBackupSchedulesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_backup_schedules( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backup_schedules_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backup_schedules( - backup_schedule.ListBackupSchedulesRequest(), - parent='parent_value', - ) - - -def test_list_backup_schedules_pager(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backup_schedules(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup_schedule.BackupSchedule) - for i in results) -def test_list_backup_schedules_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Set the response to a series of pages. 
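- # Each element of side_effect is consumed by one successive stub call,
- # emulating server-side pagination; the trailing RuntimeError fails the
- # test if the pager requests more pages than were staged.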
- call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_schedules(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_schedules_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_schedules(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backup_schedule.BackupSchedule) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_schedules_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_schedules(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.InternalUpdateGraphOperationRequest, - dict, -]) -def test_internal_update_graph_operation(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse( - ) - response = client.internal_update_graph_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.InternalUpdateGraphOperationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse) - - -def test_internal_update_graph_operation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.InternalUpdateGraphOperationRequest( - database='database_value', - operation_id='operation_id_value', - vm_identity_token='vm_identity_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
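-        # (Editor's note, illustrative only: `call.return_value` is a
-        # MagicMock, so assigning a plain string to `.name` above keeps any
-        # helper that reads `response.name` happy; this test only inspects
-        # the request captured by the mocked stub below.)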
- client.internal_update_graph_operation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.InternalUpdateGraphOperationRequest( - database='database_value', - operation_id='operation_id_value', - vm_identity_token='vm_identity_token_value', - ) - -def test_internal_update_graph_operation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.internal_update_graph_operation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.internal_update_graph_operation] = mock_rpc - request = {} - client.internal_update_graph_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.internal_update_graph_operation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_internal_update_graph_operation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.internal_update_graph_operation in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.internal_update_graph_operation] = mock_rpc - - request = {} - await client.internal_update_graph_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.internal_update_graph_operation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_internal_update_graph_operation_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.InternalUpdateGraphOperationRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
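-    # (Editor's note, illustrative only: an async gRPC stub returns an
-    # awaitable call object, so the canned response below is wrapped in
-    # grpc_helpers_async.FakeUnaryUnaryCall, which resolves to the given
-    # message when awaited.)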
-    with mock.patch.object(
-            type(client.transport.internal_update_graph_operation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse(
-        ))
-        response = await client.internal_update_graph_operation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.InternalUpdateGraphOperationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse)
-
-
-@pytest.mark.asyncio
-async def test_internal_update_graph_operation_async_from_dict():
-    await test_internal_update_graph_operation_async(request_type=dict)
-
-
-def test_internal_update_graph_operation_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.internal_update_graph_operation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.internal_update_graph_operation(
-            database='database_value',
-            operation_id='operation_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-        arg = args[0].operation_id
-        mock_val = 'operation_id_value'
-        assert arg == mock_val
-
-
-def test_internal_update_graph_operation_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.internal_update_graph_operation(
-            spanner_database_admin.InternalUpdateGraphOperationRequest(),
-            database='database_value',
-            operation_id='operation_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_internal_update_graph_operation_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.internal_update_graph_operation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.internal_update_graph_operation(
-            database='database_value',
-            operation_id='operation_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].operation_id - mock_val = 'operation_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_internal_update_graph_operation_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.internal_update_graph_operation( - spanner_database_admin.InternalUpdateGraphOperationRequest(), - database='database_value', - operation_id='operation_id_value', - ) - - -def test_list_databases_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc - - request = {} - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_databases_rest_required_fields(request_type=spanner_database_admin.ListDatabasesRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse() - # Mock the http request call within the method and fake a response. 
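-    # (Editor's note, illustrative only: the mocked flow below is that
-    # transcode() maps the request onto an HTTP rule, Session.request
-    # returns a canned 200 response, and the test then verifies that only
-    # the expected query parameters were sent on the wire.)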
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_databases(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_databases_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_databases._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_databases_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_databases(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1]) - - -def test_list_databases_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
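-    # (Editor's note, illustrative only: either calling convention works on
-    # its own, e.g.
-    #
-    #     client.list_databases(request=spanner_database_admin.ListDatabasesRequest(parent='parent_value'))
-    #     client.list_databases(parent='parent_value')
-    #
-    # Supplying both a request object and flattened fields at once raises
-    # ValueError, as asserted below.)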
-    with pytest.raises(ValueError):
-        client.list_databases(
-            spanner_database_admin.ListDatabasesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_databases_rest_pager(transport: str = 'rest'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(spanner_database_admin.ListDatabasesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
-        pager = client.list_databases(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner_database_admin.Database)
-                   for i in results)
-
-        pages = list(client.list_databases(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_create_database_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DatabaseAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_database in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_database] = mock_rpc
-
-        request = {}
-        client.create_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
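-        # (Editor's note, illustrative only: the cache in
-        # _transport._wrapped_methods is built once by _prep_wrapped_messages
-        # at client construction, with default retry/timeout policies baked
-        # in; replacing the entry above lets the test observe cache hits.)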
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_database_rest_required_fields(request_type=spanner_database_admin.CreateDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["create_statement"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["createStatement"] = 'create_statement_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "createStatement" in jsonified_request - assert jsonified_request["createStatement"] == 'create_statement_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
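-            # (Editor's note, illustrative only: 'v1/sample_method' is a
-            # placeholder URI. For a POST-mapped RPC the transcoded result
-            # also carries a 'body' entry, unlike the GET mapping exercised
-            # for list_databases above.)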
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_database_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "createStatement", ))) - - -def test_create_database_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - create_statement='create_statement_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1]) - - -def test_create_database_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_database( - spanner_database_admin.CreateDatabaseRequest(), - parent='parent_value', - create_statement='create_statement_value', - ) - - -def test_get_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc - - request = {} - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_database_rest_required_fields(request_type=spanner_database_admin.GetDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_database_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_database_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*}" % client.transport._host, args[1]) - - -def test_get_database_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - - -def test_update_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_database_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
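-            # (Editor's note, illustrative only: UpdateDatabase maps to HTTP
-            # PATCH; the required update_mask travels as the 'updateMask'
-            # query parameter while the Database message forms the request
-            # body, matching the set algebra asserted further below.)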
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_database_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("database", "updateMask", ))) - - -def test_update_database_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database.name=projects/*/instances/*/databases/*}" % client.transport._host, args[1]) - - -def test_update_database_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_database( - spanner_database_admin.UpdateDatabaseRequest(), - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_database_ddl_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database_ddl in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc - - request = {} - client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_database_ddl_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseDdlRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request_init["statements"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - jsonified_request["statements"] = 'statements_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - assert "statements" in jsonified_request - assert jsonified_request["statements"] == 'statements_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_database_ddl(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_database_ddl_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_database_ddl._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "statements", ))) - - -def test_update_database_ddl_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - statements=['statements_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_database_ddl(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1]) - - -def test_update_database_ddl_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database='database_value', - statements=['statements_value'], - ) - - -def test_drop_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.drop_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc - - request = {} - client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.drop_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_drop_database_rest_required_fields(request_type=spanner_database_admin.DropDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
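-            # (Editor's note, illustrative only: DropDatabase returns
-            # google.protobuf.Empty over HTTP DELETE, so the faked response
-            # body below is an empty string rather than serialized JSON.)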
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.drop_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_drop_database_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.drop_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", ))) - - -def test_drop_database_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.drop_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}" % client.transport._host, args[1]) - - -def test_drop_database_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.drop_database( - spanner_database_admin.DropDatabaseRequest(), - database='database_value', - ) - - -def test_get_database_ddl_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database_ddl in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc - - request = {} - client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_database_ddl_rest_required_fields(request_type=spanner_database_admin.GetDatabaseDdlRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_database_ddl(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_database_ddl_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_database_ddl._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", ))) - - -def test_get_database_ddl_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_database_ddl(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1]) - - -def test_get_database_ddl_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_database_ddl( - spanner_database_admin.GetDatabaseDdlRequest(), - database='database_value', - ) - - -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
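-            # (Editor's note, illustrative only: iam_policy_pb2 messages are
-            # plain protobuf types, so no request_type.pb() conversion is
-            # needed here; the request itself is the transcode input.)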
- pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_set_iam_policy_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) - - -def test_set_iam_policy_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.set_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy" % client.transport._host, args[1]) - - -def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_iam_policy_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", ))) - - -def test_get_iam_policy_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy" % client.transport._host, args[1]) - - -def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - jsonified_request["permissions"] = 'permissions_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == 'permissions_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_test_iam_permissions_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) - - -def test_test_iam_permissions_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - permissions=['permissions_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.test_iam_permissions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions" % client.transport._host, args[1]) - - -def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_create_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc - - request = {} - client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_rest_required_fields(request_type=gsad_backup.CreateBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "backupId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == request_init["backup_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupId"] = 'backup_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("backup_id", "encryption_config", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == 'backup_id_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup(request) - - expected_params = [ - ( - "backupId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("backupId", "encryptionConfig", )) & set(("parent", "backupId", "backup", ))) - - -def test_create_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1]) - - -def test_create_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_backup( - gsad_backup.CreateBackupRequest(), - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - -def test_copy_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.copy_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc - - request = {} - client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.copy_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_id"] = "" - request_init["source_backup"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupId"] = 'backup_id_value' - jsonified_request["sourceBackup"] = 'source_backup_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == 'backup_id_value' - assert "sourceBackup" in jsonified_request - assert jsonified_request["sourceBackup"] == 'source_backup_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.copy_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_copy_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.copy_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "backupId", "sourceBackup", "expireTime", ))) - - -def test_copy_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - expire_time=timestamp_pb2.Timestamp(seconds=751), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.copy_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups:copy" % client.transport._host, args[1]) - - -def test_copy_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.copy_backup( - backup.CopyBackupRequest(), - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - expire_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -def test_get_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.Backup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) - - -def test_get_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_backup( - backup.GetBackupRequest(), - name='name_value', - ) - - -def test_update_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc - - request = {} - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_backup_rest_required_fields(request_type=gsad_backup.UpdateBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("backup", "updateMask", ))) - - -def test_update_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup() - - # get arguments that satisfy an http rule for this method - sample_request = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{backup.name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) - - -def test_update_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_backup( - gsad_backup.UpdateBackupRequest(), - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) - - -def test_delete_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - backup.DeleteBackupRequest(), - name='name_value', - ) - - -def test_list_backups_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc - - request = {} - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = backup.ListBackupsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_backups(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_backups_rest_unset_required_fields():
-    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_backups._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_backups_rest_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = backup.ListBackupsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = backup.ListBackupsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_backups(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1])
-
-
-def test_list_backups_rest_flattened_error(transport: str = 'rest'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_backups(
-            backup.ListBackupsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_backups_rest_pager(transport: str = 'rest'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            backup.ListBackupsResponse(
-                backups=[
-                    backup.Backup(),
-                    backup.Backup(),
-                    backup.Backup(),
-                ],
-                next_page_token='abc',
-            ),
-            backup.ListBackupsResponse(
-                backups=[],
-                next_page_token='def',
-            ),
-            backup.ListBackupsResponse(
-                backups=[
-                    backup.Backup(),
-                ],
-                next_page_token='ghi',
-            ),
-            backup.ListBackupsResponse(
-                backups=[
-                    backup.Backup(),
-                    backup.Backup(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(backup.ListBackupsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
-        pager = client.list_backups(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, backup.Backup)
-                for i in results)
-
-        pages = list(client.list_backups(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_restore_database_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DatabaseAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.restore_database in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc
-
-        request = {}
-        client.restore_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_restore_database_rest_required_fields(request_type=spanner_database_admin.RestoreDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["databaseId"] = 'database_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == 'database_id_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.restore_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_restore_database_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.restore_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "databaseId", ))) - - -def test_restore_database_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - database_id='database_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.restore_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases:restore" % client.transport._host, args[1]) - - -def test_restore_database_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
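- # This is the standard contract of generated clients: callers pass either a
- # fully-formed request object or flattened keyword arguments, never both,
- # since the client could not tell which value should win.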
- with pytest.raises(ValueError): - client.restore_database( - spanner_database_admin.RestoreDatabaseRequest(), - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - -def test_list_database_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc - - request = {} - client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_database_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_database_operations_rest_required_fields(request_type=spanner_database_admin.ListDatabaseOperationsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
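- # List methods add optional query parameters (filter, page_size,
- # page_token) with proto3 defaults; the 'not mixing in' assertion above
- # checks that only such query-string fields -- never path or body fields --
- # come back from _get_unset_required_fields().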
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_database_operations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_database_operations_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_database_operations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_database_operations_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_database_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databaseOperations" % client.transport._host, args[1]) - - -def test_list_database_operations_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_database_operations( - spanner_database_admin.ListDatabaseOperationsRequest(), - parent='parent_value', - ) - - -def test_list_database_operations_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
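- # Shape of the pager test below: four fake pages (tokens 'abc', 'def',
- # 'ghi', then none) holding 3 + 0 + 1 + 2 operations, doubled so the same
- # sequence serves two client calls -- one drained item-by-item (hence
- # len(results) == 6), one walked page-by-page via pager.pages.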
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(spanner_database_admin.ListDatabaseOperationsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - pager = client.list_database_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) - - pages = list(client.list_database_operations(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_backup_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc - - request = {} - client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_backup_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backup_operations_rest_required_fields(request_type=backup.ListBackupOperationsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
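- # Note the faked transcode result below: the List* methods map to HTTP GET,
- # so unlike the POST-based tests above no 'body' key is set and every
- # request field is expected to travel in the query string.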
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backup_operations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backup_operations_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backup_operations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_backup_operations_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backup_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backupOperations" % client.transport._host, args[1]) - - -def test_list_backup_operations_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_operations( - backup.ListBackupOperationsRequest(), - parent='parent_value', - ) - - -def test_list_backup_operations_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backup.ListBackupOperationsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - pager = client.list_backup_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) - - pages = list(client.list_backup_operations(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_database_roles_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_roles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc - - request = {} - client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_database_roles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_database_roles_rest_required_fields(request_type=spanner_database_admin.ListDatabaseRolesRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_database_roles(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_database_roles_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_database_roles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_database_roles_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_database_roles(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles" % client.transport._host, args[1]) - - -def test_list_database_roles_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_database_roles( - spanner_database_admin.ListDatabaseRolesRequest(), - parent='parent_value', - ) - - -def test_list_database_roles_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(spanner_database_admin.ListDatabaseRolesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - pager = client.list_database_roles(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_database_admin.DatabaseRole) - for i in results) - - pages = list(client.list_database_roles(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_add_split_points_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.add_split_points in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc - - request = {} - client.add_split_points(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.add_split_points(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_add_split_points_rest_required_fields(request_type=spanner_database_admin.AddSplitPointsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.AddSplitPointsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.add_split_points(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_add_split_points_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.add_split_points._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "splitPoints", ))) - - -def test_add_split_points_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
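- # What the flattened test verifies: the keyword arguments must be folded
- # into a proper AddSplitPointsRequest, and path_template.validate() at the
- # end glob-matches the URL that was actually requested against the method's
- # URI template, so a mis-routed call fails even with the HTTP layer mocked.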
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.AddSplitPointsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.add_split_points(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints" % client.transport._host, args[1]) - - -def test_add_split_points_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.add_split_points( - spanner_database_admin.AddSplitPointsRequest(), - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - - -def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc - - request = {} - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.CreateBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_schedule_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "backupScheduleId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "backupScheduleId" in jsonified_request - assert jsonified_request["backupScheduleId"] == request_init["backup_schedule_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupScheduleId"] = 'backup_schedule_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("backup_schedule_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupScheduleId" in jsonified_request - assert jsonified_request["backupScheduleId"] == 'backup_schedule_id_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
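- # create_backup_schedule carries a required field in the query string, so
- # the expected params asserted below include ('backupScheduleId', ''),
- # the field re-added with its proto3 default value, alongside the usual
- # ('$alt', 'json;enum-encoding=int') pair.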
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup_schedule(request) - - expected_params = [ - ( - "backupScheduleId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_schedule_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("backupScheduleId", )) & set(("parent", "backupScheduleId", "backupSchedule", ))) - - -def test_create_backup_schedule_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - backup_schedule_id='backup_schedule_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1]) - - -def test_create_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_backup_schedule( - gsad_backup_schedule.CreateBackupScheduleRequest(), - parent='parent_value', - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - backup_schedule_id='backup_schedule_id_value', - ) - - -def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc - - request = {} - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_schedule_rest_required_fields(request_type=backup_schedule.GetBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup_schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_schedule_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_backup_schedule_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup_schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_get_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_backup_schedule( - backup_schedule.GetBackupScheduleRequest(), - name='name_value', - ) - - -def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc - - request = {} - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
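- # update_backup_schedule differs from the tests above in that request_init
- # stays empty: its required members, backupSchedule and updateMask, travel
- # as the PATCH body and (presumably) a query parameter respectively, as the
- # unset-required-fields assertion below spells out.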
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_backup_schedule_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("backupSchedule", "updateMask", ))) - - -def test_update_backup_schedule_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_update_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_backup_schedule( - gsad_backup_schedule.UpdateBackupScheduleRequest(), - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc - - request = {} - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_schedule_rest_required_fields(request_type=backup_schedule.DeleteBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
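- # DeleteBackupSchedule maps to HTTP DELETE with no response payload, which
- # is why return_value was set to None above and the faked body below is an
- # empty string: a successful delete surfaces nothing to the caller.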
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_backup_schedule(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_backup_schedule_rest_unset_required_fields():
-    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_backup_schedule._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_backup_schedule_rest_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_backup_schedule(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1])
-
-
-def test_delete_backup_schedule_rest_flattened_error(transport: str = 'rest'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_backup_schedule(
-            backup_schedule.DeleteBackupScheduleRequest(),
-            name='name_value',
-        )
-
-
-def test_list_backup_schedules_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DatabaseAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_backup_schedules in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string.
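-        # Installing the mock directly into the _wrapped_methods cache means
-        # both calls below must hit this entry without re-invoking wrap_method.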
-        client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc
-
-        request = {}
-        client.list_backup_schedules(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_backup_schedules(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_backup_schedules_rest_required_fields(request_type=backup_schedule.ListBackupSchedulesRequest):
-    transport_class = transports.DatabaseAdminRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixed together.
-    assert not set(unset_fields) - set(("page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = backup_schedule.ListBackupSchedulesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_backup_schedules(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_backup_schedules_rest_unset_required_fields():
-    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_backup_schedules._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_backup_schedules_rest_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = backup_schedule.ListBackupSchedulesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_backup_schedules(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1])
-
-
-def test_list_backup_schedules_rest_flattened_error(transport: str = 'rest'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_backup_schedules(
-            backup_schedule.ListBackupSchedulesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_backup_schedules_rest_pager(transport: str = 'rest'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
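-        # Pagination is simulated purely at the HTTP layer: each canned JSON
-        # response carries the next_page_token that drives the pager to
-        # request the following page.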
-        # Set the response as a series of pages
-        response = (
-            backup_schedule.ListBackupSchedulesResponse(
-                backup_schedules=[
-                    backup_schedule.BackupSchedule(),
-                    backup_schedule.BackupSchedule(),
-                    backup_schedule.BackupSchedule(),
-                ],
-                next_page_token='abc',
-            ),
-            backup_schedule.ListBackupSchedulesResponse(
-                backup_schedules=[],
-                next_page_token='def',
-            ),
-            backup_schedule.ListBackupSchedulesResponse(
-                backup_schedules=[
-                    backup_schedule.BackupSchedule(),
-                ],
-                next_page_token='ghi',
-            ),
-            backup_schedule.ListBackupSchedulesResponse(
-                backup_schedules=[
-                    backup_schedule.BackupSchedule(),
-                    backup_schedule.BackupSchedule(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(backup_schedule.ListBackupSchedulesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
-
-        pager = client.list_backup_schedules(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, backup_schedule.BackupSchedule)
-                   for i in results)
-
-        pages = list(client.list_backup_schedules(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_internal_update_graph_operation_rest_no_http_options():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = spanner_database_admin.InternalUpdateGraphOperationRequest()
-    with pytest.raises(RuntimeError):
-        client.internal_update_graph_operation(request)
-
-
-def test_internal_update_graph_operation_rest_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-    # Since a `google.api.http` annotation is required for using a rest transport
-    # method, this should error.
-    with pytest.raises(NotImplementedError) as not_implemented_error:
-        client.internal_update_graph_operation({})
-    assert (
-        "Method InternalUpdateGraphOperation is not available over REST transport"
-        in str(not_implemented_error.value)
-    )
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.DatabaseAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DatabaseAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.DatabaseAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DatabaseAdminClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
- transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DatabaseAdminClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DatabaseAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - transports.DatabaseAdminRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DatabaseAdminClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabasesResponse() - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
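-    # create_database is a long-running operation, so the stubbed call
-    # returns an operations_pb2.Operation rather than a Database message.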
- with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = spanner_database_admin.Database() - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_ddl_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_drop_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = None - client.drop_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.DropDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
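-# Passing request=None must behave exactly like passing a freshly-constructed
-# default request message; the assertion against request_msg verifies that.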
-def test_get_database_ddl_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - client.get_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.CreateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_copy_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.copy_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.CopyBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backup.Backup() - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = gsad_backup.Backup() - client.update_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = None - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = backup.ListBackupsResponse() - client.list_backups(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_operations_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - client.list_database_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_operations_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - call.return_value = backup.ListBackupOperationsResponse() - client.list_backup_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_roles_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabaseRolesResponse() - client.list_database_roles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseRolesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_add_split_points_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - call.return_value = spanner_database_admin.AddSplitPointsResponse() - client.add_split_points(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.AddSplitPointsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - call.return_value = gsad_backup_schedule.BackupSchedule() - client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - call.return_value = backup_schedule.BackupSchedule() - client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - call.return_value = gsad_backup_schedule.BackupSchedule() - client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value = None - client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_schedules_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - call.return_value = backup_schedule.ListBackupSchedulesResponse() - client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_internal_update_graph_operation_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse() - client.internal_update_graph_operation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DatabaseAdminAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_databases_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse( - next_page_token='next_page_token_value', - )) - await client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
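-    # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response in an
-    # awaitable, mimicking what a real async gRPC stub would return.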
- with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - enable_drop_protection=True, - reconciling=True, - )) - await client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_database_ddl_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_database_ddl(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_drop_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.drop_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.DropDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_database_ddl_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - proto_descriptors=b'proto_descriptors_blob', - )) - await client.get_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.CreateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_copy_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.copy_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.CopyBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - )) - await client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - )) - await client.update_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backups_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse( - next_page_token='next_page_token_value', - )) - await client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restore_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_database_operations_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_database_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_operations_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_backup_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_database_roles_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse( - next_page_token='next_page_token_value', - )) - await client.list_database_roles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseRolesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_add_split_points_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse( - )) - await client.add_split_points(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.AddSplitPointsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( - name='name_value', - )) - await client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule( - name='name_value', - )) - await client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( - name='name_value', - )) - await client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_schedules_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse( - next_page_token='next_page_token_value', - )) - await client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_internal_update_graph_operation_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse( - )) - await client.internal_update_graph_operation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DatabaseAdminClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_databases_rest_bad_request(request_type=spanner_database_admin.ListDatabasesRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabasesRequest, - dict, -]) -def test_list_databases_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) - - # Establish that the response is the type that we expect. 
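-    # (Illustrative note: list_databases returns a pager rather than the raw
-    # response; iterating it, e.g. `for database in response: ...`, would
-    # lazily issue further requests whenever next_page_token is set. That is
-    # an assumption about pager behavior, not something this test asserts.)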
- assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_databases") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.ListDatabasesRequest.pb(spanner_database_admin.ListDatabasesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.ListDatabasesResponse.to_json(spanner_database_admin.ListDatabasesResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.ListDatabasesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.ListDatabasesResponse() - post_with_metadata.return_value = spanner_database_admin.ListDatabasesResponse(), metadata - - client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_database_rest_bad_request(request_type=spanner_database_admin.CreateDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.CreateDatabaseRequest, - dict, -]) -def test_create_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
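-    # (Illustrative note: create_database is a long-running operation over
-    # REST, so the faked HTTP body below is a serialized longrunning
-    # Operation rather than a Database; the client wraps it in an api_core
-    # operation.Operation future. A completed operation would look roughly
-    # like:
-    #
-    #     operations_pb2.Operation(name='operations/spam', done=True)
-    #
-    # with the eventual Database packed into its `response` field; calling
-    # result() on the returned future polls until `done` is set.)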
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_database(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_database_rest_interceptors(null_interceptor):
-    transport = transports.DatabaseAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
-    )
-    client = DatabaseAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database") as post, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_database") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_database_admin.CreateDatabaseRequest.pb(spanner_database_admin.CreateDatabaseRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_database_admin.CreateDatabaseRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_database_rest_bad_request(request_type=spanner_database_admin.GetDatabaseRequest):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.GetDatabaseRequest, - dict, -]) -def test_get_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - enable_drop_protection=True, - reconciling=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_database(request) - - # Establish that the response is the type that we expect. 
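-    # (Illustrative note: the mocked body above was built by converting the
-    # proto-plus Database to raw protobuf via `.pb()` and serializing it with
-    # json_format.MessageToJson; the client parses that JSON back into a
-    # proto-plus Database, so the assertions below check the full
-    # serialize/deserialize round trip.)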
- assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.enable_drop_protection is True - assert response.reconciling is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.GetDatabaseRequest.pb(spanner_database_admin.GetDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.Database.to_json(spanner_database_admin.Database()) - req.return_value.content = return_value - - request = spanner_database_admin.GetDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.Database() - post_with_metadata.return_value = spanner_database_admin.Database(), metadata - - client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_database_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.UpdateDatabaseRequest, - dict, -]) -def test_update_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} - request_init["database"] = {'name': 'projects/sample1/instances/sample2/databases/sample3', 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'restore_info': {'source_type': 1, 'backup_info': {'backup': 'backup_value', 'version_time': {}, 'create_time': {}, 'source_database': 'source_database_value'}}, 'encryption_config': {'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'encryption_info': [{'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}], 'version_retention_period': 'version_retention_period_value', 'earliest_version_time': {}, 'default_leader': 'default_leader_value', 'database_dialect': 1, 'enable_drop_protection': True, 'reconciling': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = spanner_database_admin.UpdateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
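-        # (Illustrative note: proto-plus message classes carry their schema on
-        # `.meta.fields`, while raw protobuf classes expose a DESCRIPTOR; the
-        # `hasattr(field.message, "DESCRIPTOR")` check below tells the two
-        # apart, e.g. spanner_database_admin.Database is proto-plus while
-        # operations_pb2.Operation is raw protobuf.)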
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_database(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_database_rest_interceptors(null_interceptor):
-    transport = transports.DatabaseAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
-    )
-    client = DatabaseAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database") as post, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_database_admin.UpdateDatabaseRequest.pb(spanner_database_admin.UpdateDatabaseRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_database_admin.UpdateDatabaseRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_update_database_ddl_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseDdlRequest):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.update_database_ddl(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_database_admin.UpdateDatabaseDdlRequest,
-    dict,
-])
-def test_update_database_ddl_rest_call_success(request_type):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.update_database_ddl(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_database_ddl_rest_interceptors(null_interceptor):
-    transport = transports.DatabaseAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
-    )
-    client = DatabaseAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl") as post, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database_ddl") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_database_admin.UpdateDatabaseDdlRequest.pb(spanner_database_admin.UpdateDatabaseDdlRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_database_admin.UpdateDatabaseDdlRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_drop_database_rest_bad_request(request_type=spanner_database_admin.DropDatabaseRequest):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
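-    # (Illustrative note: patching Session.request at the class level below
-    # intercepts the transport's underlying requests session; the faked 400
-    # status is mapped by the client to core_exceptions.BadRequest, which
-    # pytest.raises then captures. Only the fields the transport actually
-    # inspects (status_code, json, request, headers) need to be mocked.)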
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.drop_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.DropDatabaseRequest, - dict, -]) -def test_drop_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.drop_database(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_drop_database_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_drop_database") as pre: - pre.assert_not_called() - pb_message = spanner_database_admin.DropDatabaseRequest.pb(spanner_database_admin.DropDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner_database_admin.DropDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.drop_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_database_ddl_rest_bad_request(request_type=spanner_database_admin.GetDatabaseDdlRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_database_ddl(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.GetDatabaseDdlRequest, - dict, -]) -def test_get_database_ddl_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - proto_descriptors=b'proto_descriptors_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_database_ddl(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) - assert response.statements == ['statements_value'] - assert response.proto_descriptors == b'proto_descriptors_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_ddl_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database_ddl") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.GetDatabaseDdlRequest.pb(spanner_database_admin.GetDatabaseDdlRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.GetDatabaseDdlResponse.to_json(spanner_database_admin.GetDatabaseDdlResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.GetDatabaseDdlRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.GetDatabaseDdlResponse() - post_with_metadata.return_value = spanner_database_admin.GetDatabaseDdlResponse(), metadata - - client.get_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_set_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.SetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. 
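-    # (Illustrative note: policy_pb2.Policy is a raw protobuf message, so the
-    # mock above serializes it with json_format.MessageToJson directly; the
-    # proto-plus responses elsewhere in this file need a `.pb()` conversion
-    # first. The assertions below read fields straight off the protobuf.)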
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_iam_policy_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.GetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.GetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_test_iam_permissions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) - req.return_value.content = return_value - - request = iam_policy_pb2.TestIamPermissionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata - - client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_rest_bad_request(request_type=gsad_backup.CreateBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup.CreateBackupRequest, - dict, -]) -def test_create_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'name_value', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup.CreateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
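-        # (Illustrative note: the pruning later in this test exists because
-        # the sample `request_init["backup"]` above is a snapshot of the
-        # Backup schema at generation time; if the installed protobuf runtime
-        # lacks a subfield from that snapshot, constructing the request would
-        # fail, so unknown subfields are deleted first.)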
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del request_init["backup"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_backup_rest_interceptors(null_interceptor):
-    transport = transports.DatabaseAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
-    )
-    client = DatabaseAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup") as post, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = gsad_backup.CreateBackupRequest.pb(gsad_backup.CreateBackupRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = gsad_backup.CreateBackupRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_copy_backup_rest_bad_request(request_type=backup.CopyBackupRequest):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.copy_backup(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    backup.CopyBackupRequest,
-    dict,
-])
-def test_copy_backup_rest_call_success(request_type):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.copy_backup(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_copy_backup_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_copy_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup.CopyBackupRequest.pb(backup.CopyBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backup.CopyBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.copy_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_rest_bad_request(request_type=backup.GetBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
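- # A canned response with status_code=400 makes the REST transport raise
- # core_exceptions.BadRequest, which the pytest.raises block below expects.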
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(request) - - -@pytest.mark.parametrize("request_type", [ - backup.GetBackupRequest, - dict, -]) -def test_get_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) - - # Establish that the response is the type that we expect. 
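- # Each assertion below verifies that a field set on the mocked protobuf
- # survived the JSON round trip onto the proto-plus response object.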
- assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.Backup.to_json(backup.Backup()) - req.return_value.content = return_value - - request = backup.GetBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.Backup() - post_with_metadata.return_value = backup.Backup(), metadata - - client.get_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_backup_rest_bad_request(request_type=gsad_backup.UpdateBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup.UpdateBackupRequest, - dict, -]) -def test_update_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} - request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'projects/sample1/instances/sample2/backups/sample3', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup.UpdateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
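- # proto-plus message classes expose `.meta.fields`, while raw `*_pb2`
- # protobuf classes expose `.DESCRIPTOR.fields`; the hasattr check below
- # picks the matching field list.
- # Hypothetical example: if the runtime EncryptionInfo message dropped
- # `kms_key_version`, the pair ('encryption_info', 'kms_key_version') would
- # be missing from runtime_nested_fields and that subfield would be deleted
- # from request_init["backup"] before the request is constructed.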
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del request_init["backup"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gsad_backup.UpdateBackupRequest.pb(gsad_backup.UpdateBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gsad_backup.Backup.to_json(gsad_backup.Backup()) - req.return_value.content = return_value - - request = gsad_backup.UpdateBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gsad_backup.Backup() - post_with_metadata.return_value = gsad_backup.Backup(), metadata - - client.update_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_rest_bad_request(request_type=backup.DeleteBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(request) - - -@pytest.mark.parametrize("request_type", [ - backup.DeleteBackupRequest, - dict, -]) -def test_delete_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup") as pre: - pre.assert_not_called() - pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = backup.DeleteBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_list_backups_rest_bad_request(request_type=backup.ListBackupsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(request) - - -@pytest.mark.parametrize("request_type", [ - backup.ListBackupsRequest, - dict, -]) -def test_list_backups_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backups") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.ListBackupsResponse.to_json(backup.ListBackupsResponse()) - req.return_value.content = return_value - - request = backup.ListBackupsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.ListBackupsResponse() - post_with_metadata.return_value = backup.ListBackupsResponse(), metadata - - client.list_backups(request, 
- metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_restore_database_rest_bad_request(request_type=spanner_database_admin.RestoreDatabaseRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.restore_database(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_database_admin.RestoreDatabaseRequest,
- dict,
-])
-def test_restore_database_rest_call_success(request_type):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.restore_database(request)
-
- # Establish that the response is the type that we expect.
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_database_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_restore_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.RestoreDatabaseRequest.pb(spanner_database_admin.RestoreDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = spanner_database_admin.RestoreDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.restore_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_database_operations_rest_bad_request(request_type=spanner_database_admin.ListDatabaseOperationsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_database_operations(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabaseOperationsRequest, - dict, -]) -def test_list_database_operations_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
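- # Patching `request` on type(client.transport._session) (the Session class)
- # rather than on the instance ensures the transport's bound method resolves
- # to this mock.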
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_database_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabaseOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_database_operations_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_operations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.ListDatabaseOperationsRequest.pb(spanner_database_admin.ListDatabaseOperationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.ListDatabaseOperationsResponse.to_json(spanner_database_admin.ListDatabaseOperationsResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.ListDatabaseOperationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - post_with_metadata.return_value = spanner_database_admin.ListDatabaseOperationsResponse(), metadata - - client.list_database_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backup_operations_rest_bad_request(request_type=backup.ListBackupOperationsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_operations(request) - - -@pytest.mark.parametrize("request_type", [ - backup.ListBackupOperationsRequest, - dict, -]) -def test_list_backup_operations_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_operations(request) - - # Establish that the response is the type that we expect. 
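- # List responses are wrapped in a pager that fetches further pages lazily;
- # attributes such as next_page_token are delegated to the underlying
- # (mocked) first response.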
- assert isinstance(response, pagers.ListBackupOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_operations_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_operations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup.ListBackupOperationsRequest.pb(backup.ListBackupOperationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.ListBackupOperationsResponse.to_json(backup.ListBackupOperationsResponse()) - req.return_value.content = return_value - - request = backup.ListBackupOperationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.ListBackupOperationsResponse() - post_with_metadata.return_value = backup.ListBackupOperationsResponse(), metadata - - client.list_backup_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_database_roles_rest_bad_request(request_type=spanner_database_admin.ListDatabaseRolesRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_database_roles(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabaseRolesRequest, - dict, -]) -def test_list_database_roles_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_database_roles(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabaseRolesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_database_roles_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_roles") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.ListDatabaseRolesRequest.pb(spanner_database_admin.ListDatabaseRolesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.ListDatabaseRolesResponse.to_json(spanner_database_admin.ListDatabaseRolesResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.ListDatabaseRolesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.ListDatabaseRolesResponse() - post_with_metadata.return_value = spanner_database_admin.ListDatabaseRolesResponse(), metadata - - client.list_database_roles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_add_split_points_rest_bad_request(request_type=spanner_database_admin.AddSplitPointsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.add_split_points(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.AddSplitPointsRequest, - dict, -]) -def test_add_split_points_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.AddSplitPointsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.add_split_points(request) - - # Establish that the response is the type that we expect. 
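- # AddSplitPointsResponse is constructed above with no fields set, so the
- # type check is the only meaningful assertion here.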
- assert isinstance(response, spanner_database_admin.AddSplitPointsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_add_split_points_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_add_split_points") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.AddSplitPointsRequest.pb(spanner_database_admin.AddSplitPointsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.AddSplitPointsResponse.to_json(spanner_database_admin.AddSplitPointsResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.AddSplitPointsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.AddSplitPointsResponse() - post_with_metadata.return_value = spanner_database_admin.AddSplitPointsResponse(), metadata - - client.add_split_points(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.CreateBackupScheduleRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_schedule(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.CreateBackupScheduleRequest, - dict, -]) -def test_create_backup_schedule_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request_init["backup_schedule"] = {'name': 'name_value', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup_schedule.CreateBackupScheduleRequest.meta.fields["backup_schedule"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
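- # Same runtime-pruning helper as in test_update_backup_rest_call_success
- # above; see the notes there.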
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gsad_backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gsad_backup_schedule.CreateBackupScheduleRequest.pb(gsad_backup_schedule.CreateBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule()) - req.return_value.content = return_value - - request = gsad_backup_schedule.CreateBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gsad_backup_schedule.BackupSchedule() - post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata - - client.create_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_schedule_rest_bad_request(request_type=backup_schedule.GetBackupScheduleRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_schedule(request) - - -@pytest.mark.parametrize("request_type", [ - backup_schedule.GetBackupScheduleRequest, - dict, -]) -def test_get_backup_schedule_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup_schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup_schedule.GetBackupScheduleRequest.pb(backup_schedule.GetBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup_schedule.BackupSchedule.to_json(backup_schedule.BackupSchedule()) - req.return_value.content = return_value - - request = backup_schedule.GetBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup_schedule.BackupSchedule() - post_with_metadata.return_value = backup_schedule.BackupSchedule(), metadata - - client.get_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_schedule(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.UpdateBackupScheduleRequest, - dict, -]) -def test_update_backup_schedule_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} - request_init["backup_schedule"] = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup_schedule.UpdateBackupScheduleRequest.meta.fields["backup_schedule"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
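- # Rationale (per the check implemented just below): proto-plus message
- # classes expose their fields through `message.meta.fields`, while vanilla
- # protobuf classes expose a raw `message.DESCRIPTOR.fields`, so the absence
- # of a DESCRIPTOR attribute identifies a proto-plus type. Illustrative:
- #   hasattr(field.message, "DESCRIPTOR")  # True -> protobuf, False -> proto-plus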
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gsad_backup_schedule.BackupSchedule)
- assert response.name == 'name_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_backup_schedule_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup_schedule") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(gsad_backup_schedule.UpdateBackupScheduleRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule())
- req.return_value.content = return_value
-
- request = gsad_backup_schedule.UpdateBackupScheduleRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = gsad_backup_schedule.BackupSchedule()
- post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata
-
- client.update_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_delete_backup_schedule_rest_bad_request(request_type=backup_schedule.DeleteBackupScheduleRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.delete_backup_schedule(request)
-
-
-@pytest.mark.parametrize("request_type", [
- backup_schedule.DeleteBackupScheduleRequest,
- dict,
-])
-def test_delete_backup_schedule_rest_call_success(request_type):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = None
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = ''
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.delete_backup_schedule(request)
-
- # Establish that the response is the type that we expect.
- assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_backup_schedule_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup_schedule") as pre:
- pre.assert_not_called()
- pb_message = backup_schedule.DeleteBackupScheduleRequest.pb(backup_schedule.DeleteBackupScheduleRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- request = backup_schedule.DeleteBackupScheduleRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
-
- client.delete_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
-
-
-def test_list_backup_schedules_rest_bad_request(request_type=backup_schedule.ListBackupSchedulesRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
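- # Note: the REST transport maps non-2xx responses to the corresponding
- # google.api_core.exceptions subclass (HTTP 400 -> core_exceptions.BadRequest,
- # 404 -> NotFound, and so on), which is the behavior pytest.raises exercises
- # in this block.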
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_schedules(request) - - -@pytest.mark.parametrize("request_type", [ - backup_schedule.ListBackupSchedulesRequest, - dict, -]) -def test_list_backup_schedules_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup_schedule.ListBackupSchedulesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_schedules(request) - - # Establish that the response is the type that we expect. 
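- # List RPCs come back wrapped in a pager: iterating it transparently
- # re-issues the request with each next_page_token until the token is empty.
- # Illustrative usage (hypothetical resource name):
- #   for schedule in client.list_backup_schedules(
- #           parent="projects/p/instances/i/databases/d"):
- #       print(schedule.name)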
- assert isinstance(response, pagers.ListBackupSchedulesPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_backup_schedules_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_schedules") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = backup_schedule.ListBackupSchedulesRequest.pb(backup_schedule.ListBackupSchedulesRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = backup_schedule.ListBackupSchedulesResponse.to_json(backup_schedule.ListBackupSchedulesResponse())
- req.return_value.content = return_value
-
- request = backup_schedule.ListBackupSchedulesRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = backup_schedule.ListBackupSchedulesResponse()
- post_with_metadata.return_value = backup_schedule.ListBackupSchedulesResponse(), metadata
-
- client.list_backup_schedules(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_internal_update_graph_operation_rest_error():
-
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- with pytest.raises(NotImplementedError) as not_implemented_error:
- client.internal_update_graph_operation({})
- assert (
- "Method InternalUpdateGraphOperation is not available over REST transport"
- in str(not_implemented_error.value)
- )
-
-
-def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- request = request_type()
- request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - client.update_database(request=None) - - # Establish that the underlying stub method was called. 
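- # mock_calls[0] unpacks as (name, args, kwargs); args[0] is the request
- # message the client constructed, so comparing it against a freshly
- # constructed default request proves that request=None was coerced into an
- # empty, well-formed request proto before the stub was invoked.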
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_ddl_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - client.update_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_drop_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - client.drop_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.DropDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_ddl_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - client.get_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - client.create_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.CreateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_copy_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - client.copy_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.CopyBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - client.update_backup(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_operations_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - client.list_database_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_operations_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - client.list_backup_operations(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_roles_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - client.list_database_roles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseRolesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_add_split_points_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - client.add_split_points(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.AddSplitPointsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_schedule_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_schedule_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_schedule_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest()
-
- assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_backup_schedule_empty_call_rest():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- client.delete_backup_schedule(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = backup_schedule.DeleteBackupScheduleRequest()
-
- assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_backup_schedules_empty_call_rest():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- client.list_backup_schedules(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = backup_schedule.ListBackupSchedulesRequest()
-
- assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_internal_update_graph_operation_empty_call_rest():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.internal_update_graph_operation),
- '__call__') as call:
- client.internal_update_graph_operation(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest()
-
- assert args[0] == request_msg
-
-
-def test_database_admin_rest_lro_client():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.AbstractOperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert isinstance(
- client.transport,
- transports.DatabaseAdminGrpcTransport,
- )
-
-def test_database_admin_base_transport_error():
- # Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(core_exceptions.DuplicateCredentialArgs):
- transport = transports.DatabaseAdminTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- credentials_file="credentials.json"
- )
-
-
-def test_database_admin_base_transport():
- # Instantiate the base transport.
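- # The base transport is deliberately abstract: __init__ is patched to a no-op
- # below so no auth machinery runs, and every RPC attribute is expected to
- # raise NotImplementedError until a concrete transport (grpc, grpc_asyncio,
- # rest) overrides it. A minimal (hypothetical) concrete stub would look like:
- #   class InMemoryTransport(transports.DatabaseAdminTransport):
- #       def list_databases(self, request): ...  # return canned responses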
- with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DatabaseAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_databases', - 'create_database', - 'get_database', - 'update_database', - 'update_database_ddl', - 'drop_database', - 'get_database_ddl', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'create_backup', - 'copy_backup', - 'get_backup', - 'update_backup', - 'delete_backup', - 'list_backups', - 'restore_database', - 'list_database_operations', - 'list_backup_operations', - 'list_database_roles', - 'add_split_points', - 'create_backup_schedule', - 'get_backup_schedule', - 'update_backup_schedule', - 'delete_backup_schedule', - 'list_backup_schedules', - 'internal_update_graph_operation', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_database_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatabaseAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id="octopus", - ) - - -def test_database_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatabaseAdminTransport() - adc.assert_called_once() - - -def test_database_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
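- # Reminder: google.auth.default() resolves Application Default Credentials by
- # checking the GOOGLE_APPLICATION_CREDENTIALS env var, then the gcloud ADC
- # file, then the GCE/GKE metadata server, and returns a (credentials,
- # project_id) tuple -- hence the 2-tuple return value mocked below.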
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DatabaseAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - ], -) -def test_database_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - transports.DatabaseAdminRestTransport, - ], -) -def test_database_admin_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DatabaseAdminGrpcTransport, grpc_helpers), - (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_database_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
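- # Precedence being exercised here: an explicitly supplied
- # ssl_channel_credentials object always wins; client_cert_source_for_mtls is
- # only consulted (to mint credentials via grpc.ssl_channel_credentials) when
- # no ready-made channel credentials were passed in.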
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
- mock_ssl_channel_creds = mock.Mock()
- transport_class(
- host="squid.clam.whelk",
- credentials=cred,
- ssl_channel_credentials=mock_ssl_channel_creds
- )
- mock_create_channel.assert_called_once_with(
- "squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_channel_creds,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
- # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
- # is used.
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-def test_database_admin_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
- transports.DatabaseAdminRestTransport(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_database_admin_host_no_port(transport_name):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'spanner.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://spanner.googleapis.com'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_database_admin_host_with_port(transport_name):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'spanner.googleapis.com:8000'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://spanner.googleapis.com:8000'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "rest",
-])
-def test_database_admin_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = DatabaseAdminClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = DatabaseAdminClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.list_databases._session
- session2 = client2.transport.list_databases._session
- assert session1 != session2
- session1 = client1.transport.create_database._session
- session2 = client2.transport.create_database._session
- assert session1 != session2
- session1 = client1.transport.get_database._session
- session2 = client2.transport.get_database._session
- assert session1 != session2
- session1 = client1.transport.update_database._session
- session2 = client2.transport.update_database._session
- assert session1 != session2
- session1 = client1.transport.update_database_ddl._session
- session2 = client2.transport.update_database_ddl._session
- assert session1 != session2
- session1 = client1.transport.drop_database._session
- session2 = client2.transport.drop_database._session
- assert session1 != session2
- session1 = client1.transport.get_database_ddl._session
- session2 = client2.transport.get_database_ddl._session
- assert session1 != session2
- session1 = client1.transport.set_iam_policy._session
- session2 = client2.transport.set_iam_policy._session
- assert session1 != session2
- session1 = client1.transport.get_iam_policy._session
- session2 = client2.transport.get_iam_policy._session
- assert session1 != session2
- session1 = client1.transport.test_iam_permissions._session
- session2 = client2.transport.test_iam_permissions._session
- assert session1 != session2
- session1 = client1.transport.create_backup._session
- session2 = client2.transport.create_backup._session
- assert session1 != session2
- session1 = client1.transport.copy_backup._session
- session2 = client2.transport.copy_backup._session
- assert session1 != session2
- session1 = client1.transport.get_backup._session
- session2 = client2.transport.get_backup._session
- assert session1 != session2
- session1 = client1.transport.update_backup._session
- session2 = client2.transport.update_backup._session
- assert session1 != session2
- session1 = client1.transport.delete_backup._session
- session2 = client2.transport.delete_backup._session
- assert session1 != session2
- session1 = client1.transport.list_backups._session
- session2 = client2.transport.list_backups._session
- assert session1 != session2
- session1 = client1.transport.restore_database._session
- session2 = client2.transport.restore_database._session
- assert session1 != session2
- session1 = client1.transport.list_database_operations._session
- session2 = client2.transport.list_database_operations._session
- assert session1 != session2
- session1 = client1.transport.list_backup_operations._session
- session2 = client2.transport.list_backup_operations._session
- assert session1 != session2
- session1 = client1.transport.list_database_roles._session
- session2 = client2.transport.list_database_roles._session
- assert session1 != session2
- session1 = client1.transport.add_split_points._session
- session2 = client2.transport.add_split_points._session
- assert session1 != session2
- session1 = client1.transport.create_backup_schedule._session
- session2 = client2.transport.create_backup_schedule._session
- assert session1 != session2
- session1 = client1.transport.get_backup_schedule._session
- session2 = client2.transport.get_backup_schedule._session
- assert session1 != session2
- session1 = client1.transport.update_backup_schedule._session
- session2 = client2.transport.update_backup_schedule._session
- assert session1 != session2
- session1 = client1.transport.delete_backup_schedule._session
- session2 = client2.transport.delete_backup_schedule._session
- assert session1 != session2
- session1 = client1.transport.list_backup_schedules._session
- session2 = client2.transport.list_backup_schedules._session
- assert session1 != session2
- session1 = client1.transport.internal_update_graph_operation._session
- session2 = client2.transport.internal_update_graph_operation._session
- assert session1 != session2
-
-
-def test_database_admin_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
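- # When a caller supplies a ready-made channel, the transport adopts it as-is
- # and skips its own credential/endpoint wiring; note below that _host still
- # reports the default ":443" suffix even though the channel itself targets
- # localhost.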
- transport = transports.DatabaseAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -def test_database_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DatabaseAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_database_admin_grpc_lro_client(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_database_admin_grpc_lro_async_client(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_backup_path(): - project = "squid" - instance = "clam" - backup = "whelk" - expected = "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) - actual = DatabaseAdminClient.backup_path(project, instance, backup) - assert expected == actual - - -def test_parse_backup_path(): - expected = { - "project": "octopus", - "instance": "oyster", - "backup": "nudibranch", - } - path = DatabaseAdminClient.backup_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_backup_path(path) - assert expected == actual - -def test_backup_schedule_path(): - project = "cuttlefish" - instance = "mussel" - database = "winkle" - schedule = "nautilus" - expected = "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, ) - actual = DatabaseAdminClient.backup_schedule_path(project, instance, database, schedule) - assert expected == actual - - -def test_parse_backup_schedule_path(): - expected = { - "project": "scallop", - "instance": "abalone", - "database": "squid", - "schedule": "clam", - } - path = DatabaseAdminClient.backup_schedule_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_backup_schedule_path(path) - assert expected == actual - -def test_crypto_key_path(): - project = "whelk" - location = "octopus" - key_ring = "oyster" - crypto_key = "nudibranch" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - actual = DatabaseAdminClient.crypto_key_path(project, location, key_ring, crypto_key) - assert expected == actual - - -def test_parse_crypto_key_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - "key_ring": "winkle", - "crypto_key": "nautilus", - } - path = DatabaseAdminClient.crypto_key_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_crypto_key_path(path) - assert expected == actual - -def test_crypto_key_version_path(): - project = "scallop" - location = "abalone" - key_ring = "squid" - crypto_key = "clam" - crypto_key_version = "whelk" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) - actual = DatabaseAdminClient.crypto_key_version_path(project, location, key_ring, crypto_key, crypto_key_version) - assert expected == actual - - -def test_parse_crypto_key_version_path(): - expected = { - "project": "octopus", - "location": "oyster", - "key_ring": "nudibranch", - "crypto_key": "cuttlefish", - "crypto_key_version": "mussel", - } - path = DatabaseAdminClient.crypto_key_version_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_crypto_key_version_path(path) - assert expected == actual - -def test_database_path(): - project = "winkle" - instance = "nautilus" - database = "scallop" - expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - actual = DatabaseAdminClient.database_path(project, instance, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "abalone", - "instance": "squid", - "database": "clam", - } - path = DatabaseAdminClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_database_path(path) - assert expected == actual - -def test_database_role_path(): - project = "whelk" - instance = "octopus" - database = "oyster" - role = "nudibranch" - expected = "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, ) - actual = DatabaseAdminClient.database_role_path(project, instance, database, role) - assert expected == actual - - -def test_parse_database_role_path(): - expected = { - "project": "cuttlefish", - "instance": "mussel", - "database": "winkle", - "role": "nautilus", - } - path = DatabaseAdminClient.database_role_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_database_role_path(path) - assert expected == actual - -def test_instance_path(): - project = "scallop" - instance = "abalone" - expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - actual = DatabaseAdminClient.instance_path(project, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = { - "project": "squid", - "instance": "clam", - } - path = DatabaseAdminClient.instance_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_instance_path(path) - assert expected == actual - -def test_instance_partition_path(): - project = "whelk" - instance = "octopus" - instance_partition = "oyster" - expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) - actual = DatabaseAdminClient.instance_partition_path(project, instance, instance_partition) - assert expected == actual - - -def test_parse_instance_partition_path(): - expected = { - "project": "nudibranch", - "instance": "cuttlefish", - "instance_partition": "mussel", - } - path = DatabaseAdminClient.instance_partition_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_instance_partition_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DatabaseAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = DatabaseAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = DatabaseAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = DatabaseAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DatabaseAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = DatabaseAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = DatabaseAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = DatabaseAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DatabaseAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = DatabaseAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = DatabaseAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc deleted file mode 100644 index 040d100fc8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner_admin_instance/__init__.py - google/cloud/spanner_admin_instance/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/spanner_admin_instance/v1/.flake8 b/owl-bot-staging/spanner_admin_instance/v1/.flake8 deleted file mode 100644 index 90316de214..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/spanner_admin_instance/v1/LICENSE b/owl-bot-staging/spanner_admin_instance/v1/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in deleted file mode 100644 index dae249ec89..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -include README.rst LICENSE -recursive-include google *.py *.pyi *.json *.proto py.typed -recursive-include tests * -global-exclude *.py[co] -global-exclude __pycache__ diff --git a/owl-bot-staging/spanner_admin_instance/v1/README.rst b/owl-bot-staging/spanner_admin_instance/v1/README.rst deleted file mode 100644 index 7123886f79..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Spanner Admin Instance API -========================================================== - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Spanner Admin Instance API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. 
- -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud import spanner_admin_instance_v1 - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud import spanner_admin_instance_v1 - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) 
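Putting the two "Logging details" points together, a common setup is to attach a handler to the :code:`google` logger and then opt in to propagation explicitly. The following is a minimal sketch using only the standard :code:`logging` module described above; it is illustrative and not part of the removed README:

.. code-block:: python

    import logging

    # Handle DEBUG-and-higher events from all Google client libraries.
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # By default these events stop at the "google" logger; forward them
    # to the root logger as well, per the "Logging details" note above.
    base_logger.propagate = True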
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css deleted file mode 100644 index b0a295464b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css +++ /dev/null @@ -1,20 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} - -/* Ensure minimum width for 'Parameters' / 'Returns' column */ -dl.field-list > dt { - min-width: 100px -} - -/* Insert space between methods for readability */ -dl.method { - padding-top: 10px; - padding-bottom: 10px -} - -/* Insert empty space between classes */ -dl.class { - padding-bottom: 50px -} diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html deleted file mode 100644 index 95e9c77fcf..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %}
-  <div class="document">
-    {{ sidebar() }}
-    {%- block document %}
-      <div class="documentwrapper">
-      {%- if render_sidebar %}
-        <div class="bodywrapper">
-      {%- endif %}
-
-          {%- block relbar_top %}
-            {%- if theme_show_relbar_top|tobool %}
-              <div class="related top">
-                &nbsp;
-                {{- rellink_markup () }}
-              </div>
-            {%- endif %}
-          {% endblock %}
-
-          <div class="body" role="main">
-            <div class="admonition" id="python2-eol">
-            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
-            Library versions released prior to that date will continue to be available. For more information please
-            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
-            </div>
-            {% block body %} {% endblock %}
-          </div>
-
-          {%- block relbar_bottom %}
-            {%- if theme_show_relbar_bottom|tobool %}
-              <div class="related bottom">
-                &nbsp;
-                {{- rellink_markup () }}
-              </div>
-            {%- endif %}
-          {% endblock %}
-
-      {%- if render_sidebar %}
-        </div>
-      {%- endif %}
-      </div>
-    {%- endblock %}
-  </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py deleted file mode 100644 index 4e0af4e74e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-spanner-admin-instance documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-spanner-admin-instance" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-instance", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-spanner-admin-instance-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-spanner-admin-instance.tex", - u"google-cloud-spanner-admin-instance Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-spanner-admin-instance", - "google-cloud-spanner-admin-instance Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-spanner-admin-instance", - "google-cloud-spanner-admin-instance Documentation", - author, - "google-cloud-spanner-admin-instance", - "google-cloud-spanner-admin-instance Library", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("https://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), - "grpc": ("https://grpc.github.io/grpc/python/", None), - "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst deleted file mode 100644 index 545d74cbb9..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. include:: multiprocessing.rst - - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - spanner_admin_instance_v1/services_ - spanner_admin_instance_v1/types_ diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst deleted file mode 100644 index 536d17b2ea..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpc` library, it is safe to - share instances across threads. In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.pool.Pool` or - :class:`multiprocessing.Process`. 
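The create-after-fork pattern from that note looks roughly like the following; this is a sketch only, and the project name plus the Application Default Credentials assumption are illustrative:

.. code-block:: python

    import multiprocessing

    from google.cloud import spanner_admin_instance_v1


    def list_instance_names(parent: str) -> None:
        # Create the client *after* the fork, inside the child process;
        # gRPC channels generally do not survive os.fork().
        # Assumes Application Default Credentials are available.
        client = spanner_admin_instance_v1.InstanceAdminClient()
        for instance in client.list_instances(parent=parent):
            print(instance.name)


    if __name__ == "__main__":
        worker = multiprocessing.Process(
            target=list_instance_names, args=("projects/my-project",)
        )
        worker.start()
        worker.join()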
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst deleted file mode 100644 index fe820b3fad..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -InstanceAdmin -------------------------------- - -.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst deleted file mode 100644 index 407d44cc34..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner Admin Instance v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - instance_admin diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst deleted file mode 100644 index 250cf6bf9b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Spanner Admin Instance v1 API -==================================================== - -.. automodule:: google.cloud.spanner_admin_instance_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py deleted file mode 100644 index a729ed0a13..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py +++ /dev/null @@ -1,109 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner_admin_instance import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.spanner_admin_instance_v1.services.instance_admin.client import InstanceAdminClient -from google.cloud.spanner_admin_instance_v1.services.instance_admin.async_client import InstanceAdminAsyncClient - -from google.cloud.spanner_admin_instance_v1.types.common import OperationProgress -from google.cloud.spanner_admin_instance_v1.types.common import ReplicaSelection -from google.cloud.spanner_admin_instance_v1.types.common import FulfillmentPeriod -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import AutoscalingConfig -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import FreeInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import Instance -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstanceConfig -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstancePartition -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesResponse -from 
google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaComputeCapacity -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaInfo -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceRequest - -__all__ = ('InstanceAdminClient', - 'InstanceAdminAsyncClient', - 'OperationProgress', - 'ReplicaSelection', - 'FulfillmentPeriod', - 'AutoscalingConfig', - 'CreateInstanceConfigMetadata', - 'CreateInstanceConfigRequest', - 'CreateInstanceMetadata', - 'CreateInstancePartitionMetadata', - 'CreateInstancePartitionRequest', - 'CreateInstanceRequest', - 'DeleteInstanceConfigRequest', - 'DeleteInstancePartitionRequest', - 'DeleteInstanceRequest', - 'FreeInstanceMetadata', - 'GetInstanceConfigRequest', - 'GetInstancePartitionRequest', - 'GetInstanceRequest', - 'Instance', - 'InstanceConfig', - 'InstancePartition', - 'ListInstanceConfigOperationsRequest', - 'ListInstanceConfigOperationsResponse', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'ListInstancePartitionOperationsRequest', - 'ListInstancePartitionOperationsResponse', - 'ListInstancePartitionsRequest', - 'ListInstancePartitionsResponse', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'MoveInstanceMetadata', - 'MoveInstanceRequest', - 'MoveInstanceResponse', - 'ReplicaComputeCapacity', - 'ReplicaInfo', - 'UpdateInstanceConfigMetadata', - 'UpdateInstanceConfigRequest', - 'UpdateInstanceMetadata', - 'UpdateInstancePartitionMetadata', - 'UpdateInstancePartitionRequest', - 'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed deleted file mode 100644 index 915a8e55e3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py deleted file mode 100644 index 384e545d89..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.instance_admin import InstanceAdminClient -from .services.instance_admin import InstanceAdminAsyncClient - -from .types.common import OperationProgress -from .types.common import ReplicaSelection -from .types.common import FulfillmentPeriod -from .types.spanner_instance_admin import AutoscalingConfig -from .types.spanner_instance_admin import CreateInstanceConfigMetadata -from .types.spanner_instance_admin import CreateInstanceConfigRequest -from .types.spanner_instance_admin import CreateInstanceMetadata -from .types.spanner_instance_admin import CreateInstancePartitionMetadata -from .types.spanner_instance_admin import CreateInstancePartitionRequest -from .types.spanner_instance_admin import CreateInstanceRequest -from .types.spanner_instance_admin import DeleteInstanceConfigRequest -from .types.spanner_instance_admin import DeleteInstancePartitionRequest -from .types.spanner_instance_admin import DeleteInstanceRequest -from .types.spanner_instance_admin import FreeInstanceMetadata -from .types.spanner_instance_admin import GetInstanceConfigRequest -from .types.spanner_instance_admin import GetInstancePartitionRequest -from .types.spanner_instance_admin import GetInstanceRequest -from .types.spanner_instance_admin import Instance -from .types.spanner_instance_admin import InstanceConfig -from .types.spanner_instance_admin import InstancePartition -from .types.spanner_instance_admin import ListInstanceConfigOperationsRequest -from .types.spanner_instance_admin import ListInstanceConfigOperationsResponse -from .types.spanner_instance_admin import ListInstanceConfigsRequest -from .types.spanner_instance_admin import ListInstanceConfigsResponse -from .types.spanner_instance_admin import ListInstancePartitionOperationsRequest -from .types.spanner_instance_admin import ListInstancePartitionOperationsResponse -from .types.spanner_instance_admin import 
ListInstancePartitionsRequest -from .types.spanner_instance_admin import ListInstancePartitionsResponse -from .types.spanner_instance_admin import ListInstancesRequest -from .types.spanner_instance_admin import ListInstancesResponse -from .types.spanner_instance_admin import MoveInstanceMetadata -from .types.spanner_instance_admin import MoveInstanceRequest -from .types.spanner_instance_admin import MoveInstanceResponse -from .types.spanner_instance_admin import ReplicaComputeCapacity -from .types.spanner_instance_admin import ReplicaInfo -from .types.spanner_instance_admin import UpdateInstanceConfigMetadata -from .types.spanner_instance_admin import UpdateInstanceConfigRequest -from .types.spanner_instance_admin import UpdateInstanceMetadata -from .types.spanner_instance_admin import UpdateInstancePartitionMetadata -from .types.spanner_instance_admin import UpdateInstancePartitionRequest -from .types.spanner_instance_admin import UpdateInstanceRequest - -__all__ = ( - 'InstanceAdminAsyncClient', -'AutoscalingConfig', -'CreateInstanceConfigMetadata', -'CreateInstanceConfigRequest', -'CreateInstanceMetadata', -'CreateInstancePartitionMetadata', -'CreateInstancePartitionRequest', -'CreateInstanceRequest', -'DeleteInstanceConfigRequest', -'DeleteInstancePartitionRequest', -'DeleteInstanceRequest', -'FreeInstanceMetadata', -'FulfillmentPeriod', -'GetInstanceConfigRequest', -'GetInstancePartitionRequest', -'GetInstanceRequest', -'Instance', -'InstanceAdminClient', -'InstanceConfig', -'InstancePartition', -'ListInstanceConfigOperationsRequest', -'ListInstanceConfigOperationsResponse', -'ListInstanceConfigsRequest', -'ListInstanceConfigsResponse', -'ListInstancePartitionOperationsRequest', -'ListInstancePartitionOperationsResponse', -'ListInstancePartitionsRequest', -'ListInstancePartitionsResponse', -'ListInstancesRequest', -'ListInstancesResponse', -'MoveInstanceMetadata', -'MoveInstanceRequest', -'MoveInstanceResponse', -'OperationProgress', -'ReplicaComputeCapacity', -'ReplicaInfo', -'ReplicaSelection', -'UpdateInstanceConfigMetadata', -'UpdateInstanceConfigRequest', -'UpdateInstanceMetadata', -'UpdateInstancePartitionMetadata', -'UpdateInstancePartitionRequest', -'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json deleted file mode 100644 index 60fa46718a..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json +++ /dev/null @@ -1,343 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.spanner_admin_instance_v1", - "protoPackage": "google.spanner.admin.instance.v1", - "schema": "1.0", - "services": { - "InstanceAdmin": { - "clients": { - "grpc": { - "libraryClient": "InstanceAdminClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "CreateInstanceConfig": { - "methods": [ - "create_instance_config" - ] - }, - "CreateInstancePartition": { - "methods": [ - "create_instance_partition" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "DeleteInstanceConfig": { - "methods": [ - "delete_instance_config" - ] - }, - "DeleteInstancePartition": { - "methods": [ - "delete_instance_partition" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - 
"GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "GetInstancePartition": { - "methods": [ - "get_instance_partition" - ] - }, - "ListInstanceConfigOperations": { - "methods": [ - "list_instance_config_operations" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstancePartitionOperations": { - "methods": [ - "list_instance_partition_operations" - ] - }, - "ListInstancePartitions": { - "methods": [ - "list_instance_partitions" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "MoveInstance": { - "methods": [ - "move_instance" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateInstanceConfig": { - "methods": [ - "update_instance_config" - ] - }, - "UpdateInstancePartition": { - "methods": [ - "update_instance_partition" - ] - } - } - }, - "grpc-async": { - "libraryClient": "InstanceAdminAsyncClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "CreateInstanceConfig": { - "methods": [ - "create_instance_config" - ] - }, - "CreateInstancePartition": { - "methods": [ - "create_instance_partition" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "DeleteInstanceConfig": { - "methods": [ - "delete_instance_config" - ] - }, - "DeleteInstancePartition": { - "methods": [ - "delete_instance_partition" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "GetInstancePartition": { - "methods": [ - "get_instance_partition" - ] - }, - "ListInstanceConfigOperations": { - "methods": [ - "list_instance_config_operations" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstancePartitionOperations": { - "methods": [ - "list_instance_partition_operations" - ] - }, - "ListInstancePartitions": { - "methods": [ - "list_instance_partitions" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "MoveInstance": { - "methods": [ - "move_instance" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateInstanceConfig": { - "methods": [ - "update_instance_config" - ] - }, - "UpdateInstancePartition": { - "methods": [ - "update_instance_partition" - ] - } - } - }, - "rest": { - "libraryClient": "InstanceAdminClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "CreateInstanceConfig": { - "methods": [ - "create_instance_config" - ] - }, - "CreateInstancePartition": { - "methods": [ - "create_instance_partition" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "DeleteInstanceConfig": { - "methods": [ - "delete_instance_config" - ] - }, - "DeleteInstancePartition": { - "methods": [ - "delete_instance_partition" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "GetInstancePartition": { - "methods": [ - "get_instance_partition" - ] - }, - 
"ListInstanceConfigOperations": { - "methods": [ - "list_instance_config_operations" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstancePartitionOperations": { - "methods": [ - "list_instance_partition_operations" - ] - }, - "ListInstancePartitions": { - "methods": [ - "list_instance_partitions" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "MoveInstance": { - "methods": [ - "move_instance" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateInstanceConfig": { - "methods": [ - "update_instance_config" - ] - }, - "UpdateInstancePartition": { - "methods": [ - "update_instance_partition" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed deleted file mode 100644 index 915a8e55e3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py deleted file mode 100644 index 72ef4ab187..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import InstanceAdminClient -from .async_client import InstanceAdminAsyncClient - -__all__ = ( - 'InstanceAdminClient', - 'InstanceAdminAsyncClient', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py deleted file mode 100644 index de12cfc17e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ /dev/null @@ -1,3468 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import uuid - -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport -from .client import InstanceAdminClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class InstanceAdminAsyncClient: - """Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - """ - - _client: InstanceAdminClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = InstanceAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = InstanceAdminClient._DEFAULT_UNIVERSE - - instance_path = staticmethod(InstanceAdminClient.instance_path) - parse_instance_path = staticmethod(InstanceAdminClient.parse_instance_path) - instance_config_path = staticmethod(InstanceAdminClient.instance_config_path) - parse_instance_config_path = staticmethod(InstanceAdminClient.parse_instance_config_path) - instance_partition_path = staticmethod(InstanceAdminClient.instance_partition_path) - parse_instance_partition_path = staticmethod(InstanceAdminClient.parse_instance_partition_path) - common_billing_account_path = staticmethod(InstanceAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(InstanceAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(InstanceAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(InstanceAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(InstanceAdminClient.common_organization_path) - parse_common_organization_path = staticmethod(InstanceAdminClient.parse_common_organization_path) - common_project_path = staticmethod(InstanceAdminClient.common_project_path) - parse_common_project_path = staticmethod(InstanceAdminClient.parse_common_project_path) - common_location_path = staticmethod(InstanceAdminClient.common_location_path) - parse_common_location_path = staticmethod(InstanceAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminAsyncClient: The constructed client. - """ - return InstanceAdminClient.from_service_account_info.__func__(InstanceAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminAsyncClient: The constructed client. - """ - return InstanceAdminClient.from_service_account_file.__func__(InstanceAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return InstanceAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> InstanceAdminTransport: - """Returns the transport used by the client instance. - - Returns: - InstanceAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = InstanceAdminClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the instance admin async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the InstanceAdminTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = InstanceAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner.admin.instance_v1.InstanceAdminAsyncClient`.", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "credentialsType": None, - } - ) - - async def list_instance_configs(self, - request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstanceConfigsAsyncPager: - r"""Lists the supported instance configurations for a - given project. - Returns both Google-managed configurations and - user-managed configurations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_list_instance_configs(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]]): - The request object. 
The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - parent (:class:`str`): - Required. The name of the project for which a list of - supported instance configurations is requested. Values - are of the form ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager: - The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest): - request = spanner_instance_admin.ListInstanceConfigsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstanceConfigsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance_config(self, - request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.InstanceConfig: - r"""Gets information about a particular instance - configuration. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_get_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]]): - The request object. The request for - [GetInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - name (:class:`str`): - Required. The name of the requested instance - configuration. Values are of the form - ``projects/<project>/instanceConfigs/<instance_config>``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstanceConfig: - A possible configuration for a Cloud - Spanner instance. Configurations define - the geographic placement of nodes and - their replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest): - request = spanner_instance_admin.GetInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
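# A minimal caller-side sketch (assumed project and config names, not part
# of the generated code): a fetched ``InstanceConfig`` exposes its replica
# topology, e.g.
#
#     config = await client.get_instance_config(
#         name="projects/my-project/instanceConfigs/regional-us-east1",
#     )
#     for replica in config.replicas:
#         print(replica.location, replica.type_)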
- return response - - async def create_instance_config(self, - request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, - instance_config_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an instance configuration and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance configuration. The - instance configuration name is assigned by the caller. If the - named instance configuration already exists, - ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. - - Immediately after the request returns: - - - The instance configuration is readable via the API, with all - requested attributes. The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. Its state is ``CREATING``. - - While the operation is pending: - - - Cancelling the operation renders the instance configuration - immediately unreadable via the API. - - Except for deleting the creating resource, all other attempts - to modify the instance configuration are rejected. - - Upon completion of the returned operation: - - - Instances can be created using the instance configuration. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. Its state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and - can be used to track creation of the instance configuration. The - metadata field type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.create`` - permission on the resource - [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_create_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.CreateInstanceConfigRequest( - parent="parent_value", - instance_config_id="instance_config_id_value", - ) - - # Make the request - operation = client.create_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]]): - The request object. The request for - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. 
- parent (:class:`str`): - Required. The name of the project in which to create the - instance configuration. Values are of the form - ``projects/<project>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`): - Required. The ``InstanceConfig`` proto of the - configuration to create. ``instance_config.name`` must - be ``<parent>/instanceConfigs/<instance_config_id>``. - ``instance_config.base_config`` must be a Google-managed - configuration name, e.g. <parent>/instanceConfigs/us-east1, - <parent>/instanceConfigs/nam3. - - This corresponds to the ``instance_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_config_id (:class:`str`): - Required. The ID of the instance configuration to - create. Valid identifiers are of the form - ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and - 64 characters in length. The ``custom-`` prefix is - required to avoid name conflicts with Google-managed - configurations. - - This corresponds to the ``instance_config_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations - define the geographic placement of nodes and their - replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, instance_config, instance_config_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest): - request = spanner_instance_admin.CreateInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_config is not None: - request.instance_config = instance_config - if instance_config_id is not None: - request.instance_config_id = instance_config_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstanceConfig, - metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata, - ) - - # Done; return the response. - return response - - async def update_instance_config(self, - request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None, - *, - instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an instance configuration. The returned long-running - operation can be used to track the progress of updating the - instance. If the named instance configuration does not exist, - returns ``NOT_FOUND``. - - Only user-managed configurations can be updated. - - Immediately after the request returns: - - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. - - While the operation is pending: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. - The operation is guaranteed to succeed at undoing all changes, - after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance configuration are - rejected. - - Reading the instance configuration via the API continues to - give the pre-request values. - - Upon completion of the returned operation: - - - Creating instances using the instance configuration uses the - new values. - - The new values of the instance configuration are readable via - the API. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. - - The returned long-running operation will have a name of the - format ``/operations/`` and - can be used to track the instance configuration modification. - The metadata field type is - [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_update_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.UpdateInstanceConfigRequest( - ) - - # Make the request - operation = client.update_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]]): - The request object. The request for - [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. - instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`): - Required. The user instance configuration to update, - which must always include the instance configuration - name. Otherwise, only fields mentioned in - [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask] - need be included. To prevent conflicts of concurrent - updates, - [etag][google.spanner.admin.instance.v1.InstanceConfig.etag] - can be used. - - This corresponds to the ``instance_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields in - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - should be updated. The field mask must always be - specified; this prevents any future fields in - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - from being erased accidentally by clients that do not - know about them. Only display_name and labels can be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations - define the geographic placement of nodes and their - replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
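# A minimal caller-side sketch (assumed names): updating only
# ``display_name`` by naming it in the ``update_mask``, which leaves all
# other fields untouched:
#
#     from google.protobuf import field_mask_pb2
#
#     operation = await client.update_instance_config(
#         instance_config=spanner_admin_instance_v1.InstanceConfig(
#             name="projects/my-project/instanceConfigs/custom-my-config",
#             display_name="Renamed configuration",
#         ),
#         update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
#     )
#     config = await operation.result()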
- flattened_params = [instance_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest): - request = spanner_instance_admin.UpdateInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance_config is not None: - request.instance_config = instance_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance_config.name", request.instance_config.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstanceConfig, - metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance_config(self, - request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the instance configuration. Deletion is only allowed - when no instances are using the configuration. If any instances - are using the configuration, returns ``FAILED_PRECONDITION``. - - Only user-managed configurations can be deleted. - - Authorization requires ``spanner.instanceConfigs.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_delete_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_config(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]]): - The request object. 
The request for - [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. - name (:class:`str`): - Required. The name of the instance configuration to be - deleted. Values are of the form - ``projects/<project>/instanceConfigs/<instance_config>`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest): - request = spanner_instance_admin.DeleteInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_instance_config_operations(self, - request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstanceConfigOperationsAsyncPager: - r"""Lists the user-managed instance configuration long-running - operations in the given project. An instance configuration - operation has a name of the form - ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_list_instance_config_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_config_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]]): - The request object. The request for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - parent (:class:`str`): - Required. The project of the instance configuration - operations. Values are of the form - ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager: - The response for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest): - request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_config_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstanceConfigOperationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_instances(self, - request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesAsyncPager: - r"""Lists all instances in the given project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_list_instances(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]]): - The request object. The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - parent (:class:`str`): - Required. The name of the project for which a list of - instances is requested. Values are of the form - ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager: - The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
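# A minimal caller-side sketch (assumed parent): besides ``async for`` over
# individual ``Instance`` results, the returned pager exposes whole response
# pages:
#
#     pager = await client.list_instances(parent="projects/my-project")
#     async for page in pager.pages:
#         print(f"page with {len(page.instances)} instances")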
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancesRequest): - request = spanner_instance_admin.ListInstancesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_instance_partitions(self, - request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancePartitionsAsyncPager: - r"""Lists all instance partitions for the given instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_list_instance_partitions(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partitions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]]): - The request object. The request for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. - parent (:class:`str`): - Required. The instance whose instance partitions should - be listed. Values are of the form - ``projects//instances/``. Use - ``{instance} = '-'`` to list instance partitions for all - Instances in a project, e.g., - ``projects/myproject/instances/-``. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager: - The response for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest): - request = spanner_instance_admin.ListInstancePartitionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partitions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancePartitionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance(self, - request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.Instance: - r"""Gets information about a particular instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_get_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]]): - The request object. The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - name (:class:`str`): - Required. The name of the requested instance. Values are - of the form ``projects//instances/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.Instance: - An isolated set of Cloud Spanner - resources on which databases can be - hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.GetInstanceRequest): - request = spanner_instance_admin.GetInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
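# A minimal caller-side sketch (assumed names): ``GetInstanceRequest`` also
# accepts a ``field_mask`` to limit which ``Instance`` fields are returned:
#
#     from google.protobuf import field_mask_pb2
#
#     instance = await client.get_instance(
#         request=spanner_admin_instance_v1.GetInstanceRequest(
#             name="projects/my-project/instances/my-instance",
#             field_mask=field_mask_pb2.FieldMask(paths=["name", "node_count"]),
#         )
#     )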
- return response - - async def create_instance(self, - request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[spanner_instance_admin.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an instance and begins preparing it to begin serving. - The returned long-running operation can be used to track the - progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track creation of the instance. The metadata field type - is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_create_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]]): - The request object. The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - parent (:class:`str`): - Required. The name of the project in which to create the - instance. Values are of the form ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- instance_id (:class:`str`): - Required. The ID of the instance to create. Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` - and must be between 2 and 64 characters in length. - - This corresponds to the ``instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): - Required. The instance to create. The name may be - omitted, but if specified must be - ``/instances/``. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, instance_id, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstanceRequest): - request = spanner_instance_admin.CreateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_id is not None: - request.instance_id = instance_id - if instance is not None: - request.instance = instance - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.Instance, - metadata_type=spanner_instance_admin.CreateInstanceMetadata, - ) - - # Done; return the response. 
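# A minimal caller-side sketch (assumed names): awaiting the returned
# long-running operation until the new instance reaches ``READY``:
#
#     operation = await client.create_instance(
#         parent="projects/my-project",
#         instance_id="my-instance",
#         instance=spanner_admin_instance_v1.Instance(
#             config="projects/my-project/instanceConfigs/regional-us-east1",
#             display_name="My Instance",
#             node_count=1,
#         ),
#     )
#     instance = await operation.result(timeout=300)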
- return response - - async def update_instance(self, - request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[spanner_instance_admin.Instance] = None, - field_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an instance, and begins allocating or releasing - resources as requested. The returned long-running operation can - be used to track the progress of updating the instance. If the - named instance does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The metadata field type - is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on the resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_update_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]]): - The request object. 
The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): - Required. The instance to update, which must always - include the instance name. Otherwise, only fields - mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] - should be updated. The field mask must always be - specified; this prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): - request = spanner_instance_admin.UpdateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.Instance, - metadata_type=spanner_instance_admin.UpdateInstanceMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance(self, - request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_delete_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]]): - The request object. The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - name (:class:`str`): - Required. The name of the instance to be deleted. Values - are of the form - ``projects//instances/`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest): - request = spanner_instance_admin.DeleteInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on an instance resource. Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. 
- - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. 
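Because each ``Policy`` carries an ``etag``, updates are normally done read-modify-write across these two methods; a minimal sketch of that pattern (the resource name, role, and member are illustrative):

    from google.cloud import spanner_admin_instance_v1

    async def sample_add_binding():
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
        resource = "projects/my-project/instances/my-instance"
        policy = await client.get_iam_policy(resource=resource)
        # Mutate the returned policy locally; the etag it carries lets the
        # server detect and reject concurrent writers.
        policy.bindings.add(
            role="roles/spanner.databaseReader",
            members=["user:eve@example.com"],
        )
        updated = await client.set_iam_policy(
            request={"resource": resource, "policy": policy},
        )
        print(updated.etag)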
- - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - permissions: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`MutableSequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource, permissions] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance_partition(self, - request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.InstancePartition: - r"""Gets information about a particular instance - partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]]): - The request object. The request for - [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. - name (:class:`str`): - Required. The name of the requested instance partition. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstancePartition: - An isolated set of Cloud Spanner - resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
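A short sketch of the flattened form of ``test_iam_permissions`` described above (the permission strings are illustrative Spanner IAM permissions; the resource name is a placeholder):

    from google.cloud import spanner_admin_instance_v1

    async def sample_check_permissions():
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
        response = await client.test_iam_permissions(
            resource="projects/my-project/instances/my-instance",
            permissions=["spanner.instances.get", "spanner.instances.update"],
        )
        # Only the subset of permissions the caller actually holds is returned.
        print(list(response.permissions))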
- if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest): - request = spanner_instance_admin.GetInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_instance_partition(self, - request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, - instance_partition_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an instance partition and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance partition. The - instance partition name is assigned by the caller. If the named - instance partition already exists, ``CreateInstancePartition`` - returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance partition is readable via the API, with all - requested attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance partition - immediately unreadable via the API. - - The instance partition can be deleted. - - All other attempts to modify the instance partition are - rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can start using this instance partition. - - The instance partition's allocated resource levels are - readable via the API. - - The instance partition's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track creation of the instance partition. The - metadata field type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_create_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstancePartitionRequest( - parent="parent_value", - instance_partition_id="instance_partition_id_value", - instance_partition=instance_partition, - ) - - # Make the request - operation = client.create_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]]): - The request object. The request for - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - parent (:class:`str`): - Required. The name of the instance in which to create - the instance partition. Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`): - Required. The instance partition to create. The - instance_partition.name may be omitted, but if specified - must be - ``/instancePartitions/``. - - This corresponds to the ``instance_partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_partition_id (:class:`str`): - Required. The ID of the instance partition to create. - Valid identifiers are of the form - ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 - characters in length. - - This corresponds to the ``instance_partition_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, instance_partition, instance_partition_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): - request = spanner_instance_admin.CreateInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_partition is not None: - request.instance_partition = instance_partition - if instance_partition_id is not None: - request.instance_partition_id = instance_partition_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance_partition(self, - request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_partition(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]]): - The request object. 
The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - name (:class:`str`): - Required. The name of the instance partition to be - deleted. Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): - request = spanner_instance_admin.DeleteInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_instance_partition(self, - request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, - *, - instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, - field_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. 
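For ``delete_instance_partition`` above, the flattened form reduces to a single ``name`` argument; a minimal sketch (the resource name is a placeholder):

    from google.cloud import spanner_admin_instance_v1

    async def sample_delete_partition():
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
        # Requires that the partition is unused by any database or backup
        # and is not the instance's default partition.
        await client.delete_instance_partition(
            name="projects/my-project/instances/my-instance"
                 "/instancePartitions/my-partition",
        )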
- - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track the instance partition modification. - The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_update_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( - instance_partition=instance_partition, - ) - - # Make the request - operation = client.update_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]]): - The request object. The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`): - Required. The instance partition to update, which must - always include the instance partition name. Otherwise, - only fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] - need be included. - - This corresponds to the ``instance_partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. 
A mask specifying which fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - should be updated. The field mask must always be - specified; this prevents any future fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance_partition, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest): - request = spanner_instance_admin.UpdateInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance_partition is not None: - request.instance_partition = instance_partition - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance_partition.name", request.instance_partition.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata, - ) - - # Done; return the response. 
- return response - - async def list_instance_partition_operations(self, - request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancePartitionOperationsAsyncPager: - r"""Lists instance partition long-running operations in the given - instance. An instance partition operation has a name of the form - ``projects//instances//instancePartitions//operations/``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_list_instance_partition_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partition_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]]): - The request object. The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - parent (:class:`str`): - Required. The parent instance of the instance partition - operations. Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager: - The response for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. 
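Concretely, the pager returned by ``list_instance_partition_operations`` can be consumed as shown below (the parent name is a placeholder); further pages are fetched transparently during iteration:

    from google.cloud import spanner_admin_instance_v1

    async def sample_list_partition_operations():
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
        pager = await client.list_instance_partition_operations(
            parent="projects/my-project/instances/my-instance",
        )
        async for operation in pager:
            # Each item is a google.longrunning Operation message.
            print(operation.name)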
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest): - request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partition_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancePartitionOperationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def move_instance(self, - request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Moves an instance to the target instance configuration. You can - use the returned long-running operation to track the progress of - moving the instance. - - ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance - meets any of the following criteria: - - - Is undergoing a move to a different instance configuration - - Has backups - - Has an ongoing update - - Contains any CMEK-enabled databases - - Is a free trial instance - - While the operation is pending: - - - All other attempts to modify the instance, including changes - to its compute capacity, are rejected. - - - The following database and backup admin operations are - rejected: - - - ``DatabaseAdmin.CreateDatabase`` - - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if - default_leader is specified in the request.) - - ``DatabaseAdmin.RestoreDatabase`` - - ``DatabaseAdmin.CreateBackup`` - - ``DatabaseAdmin.CopyBackup`` - - - Both the source and target instance configurations are subject - to hourly compute and storage charges. - - - The instance might experience higher read-write latencies and - a higher transaction abort rate. However, moving an instance - doesn't cause any downtime. - - The returned long-running operation has a name of the format - ``/operations/`` and can be used to - track the move instance operation. 
The metadata field type is - [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. - Cancellation is not immediate because it involves moving any - data previously moved to the target instance configuration back - to the original instance configuration. You can use this - operation to track the progress of the cancellation. Upon - successful completion of the cancellation, the operation - terminates with ``CANCELLED`` status. - - If not cancelled, upon completion of the returned operation: - - - The instance successfully moves to the target instance - configuration. - - You are billed for compute and storage in target instance - configuration. - - Authorization requires the ``spanner.instances.update`` - permission on the resource - [instance][google.spanner.admin.instance.v1.Instance]. - - For more details, see `Move an - instance `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_move_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.MoveInstanceRequest( - name="name_value", - target_config="target_config_value", - ) - - # Make the request - operation = client.move_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]]): - The request object. The request for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.MoveInstanceRequest): - request = spanner_instance_admin.MoveInstanceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.move_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.MoveInstanceResponse, - metadata_type=spanner_instance_admin.MoveInstanceMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. 
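Together with ``get_operation``, this supports a poll-then-cancel pattern; a sketch using the dict request form (the operation name is a placeholder, and ``client`` is assumed to be an ``InstanceAdminAsyncClient``):

    async def cancel_if_still_running(client, operation_name):
        # The mixins accept plain dicts, which are coerced into the
        # corresponding operations_pb2 request messages.
        op = await client.get_operation(request={"name": operation_name})
        if not op.done:
            # Best effort only; the server may still complete the operation.
            await client.cancel_operation(request={"name": operation_name})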
If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "InstanceAdminAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "InstanceAdminAsyncClient", -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py deleted file mode 100644 index 6f504cdc37..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ /dev/null @@ -1,3849 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
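The operations mixins and the `__aenter__`/`__aexit__` hooks above combine naturally; a minimal sketch, assuming Application Default Credentials and a reachable service (the project/instance names are placeholders):

    import asyncio
    from google.cloud.spanner_admin_instance_v1 import InstanceAdminAsyncClient

    async def main() -> None:
        # The context manager closes the transport on exit.
        async with InstanceAdminAsyncClient() as client:
            # Raw-protobuf requests may be passed as dicts; they are coerced
            # via keyword expansion, exactly as the mixin bodies above show.
            ops = await client.list_operations(
                request={"name": "projects/my-project/instances/my-instance/operations"}
            )
            for op in ops.operations:
                print(op.name, op.done)

    asyncio.run(main())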
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import uuid -import warnings - -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import InstanceAdminGrpcTransport -from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport -from .transports.rest import InstanceAdminRestTransport - - -class InstanceAdminClientMeta(type): - """Metaclass for the InstanceAdmin client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] - _transport_registry["grpc"] = InstanceAdminGrpcTransport - _transport_registry["grpc_asyncio"] = InstanceAdminGrpcAsyncIOTransport - _transport_registry["rest"] = InstanceAdminRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[InstanceAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
-        return next(iter(cls._transport_registry.values()))
-
-
-class InstanceAdminClient(metaclass=InstanceAdminClientMeta):
-    """Cloud Spanner Instance Admin API
-
-    The Cloud Spanner Instance Admin API can be used to create,
-    delete, modify and list instances. Instances are dedicated Cloud
-    Spanner serving and storage resources to be used by Cloud
-    Spanner databases.
-
-    Each instance has a "configuration", which dictates where the
-    serving resources for the Cloud Spanner instance are located
-    (e.g., US-central, Europe). Configurations are created by Google
-    based on resource availability.
-
-    Cloud Spanner billing is based on the instances that exist and
-    their sizes. After an instance exists, there are no additional
-    per-database or per-operation charges for use of the instance
-    (though there may be additional network bandwidth charges).
-    Instances offer isolation: problems with databases in one
-    instance will not affect other instances. However, within an
-    instance databases can affect each other. For example, if one
-    database in an instance receives a lot of requests and consumes
-    most of the instance resources, fewer resources are available
-    for other databases in that instance, and their performance may
-    suffer.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "spanner.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-        info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            InstanceAdminClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-        file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            InstanceAdminClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
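The registry and credential helpers above are all class-level, so they can be exercised without network access; a brief sketch (the key-file path is a placeholder):

    from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient

    # Transport lookup goes through the metaclass registry; with no label,
    # the first registered transport (grpc) is returned.
    assert InstanceAdminClient.get_transport_class("rest").__name__ == "InstanceAdminRestTransport"
    assert InstanceAdminClient.get_transport_class().__name__ == "InstanceAdminGrpcTransport"

    # The mTLS conversion is pure string manipulation on the endpoint.
    assert (InstanceAdminClient._get_default_mtls_endpoint("spanner.googleapis.com")
            == "spanner.mtls.googleapis.com")

    # client = InstanceAdminClient.from_service_account_file("sa-key.json")  # placeholder path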
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> InstanceAdminTransport: - """Returns the transport used by the client instance. - - Returns: - InstanceAdminTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def instance_path(project: str,instance: str,) -> str: - """Returns a fully-qualified instance string.""" - return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - - @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: - """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_config_path(project: str,instance_config: str,) -> str: - """Returns a fully-qualified instance_config string.""" - return "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) - - @staticmethod - def parse_instance_config_path(path: str) -> Dict[str,str]: - """Parses a instance_config path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instanceConfigs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str: - """Returns a fully-qualified instance_partition string.""" - return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) - - @staticmethod - def parse_instance_partition_path(path: str) -> Dict[str,str]: - """Parses a instance_partition path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/instancePartitions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def 
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
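A sketch of the resolution order this (deprecated) helper implements, together with the newer private helpers defined just below; all are pure functions, assuming no relevant environment variables are set:

    from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient

    # Defaults: no client cert, "auto" mTLS mode -> plain endpoint, no cert source.
    endpoint, cert_source = InstanceAdminClient.get_mtls_endpoint_and_cert_source()
    assert endpoint == "spanner.googleapis.com" and cert_source is None

    # The replacement logic: "auto" without a cert falls back to the endpoint
    # template; "always" forces the mTLS endpoint (default universe only).
    assert (InstanceAdminClient._get_api_endpoint(None, None, "googleapis.com", "auto")
            == "spanner.googleapis.com")
    assert (InstanceAdminClient._get_api_endpoint(None, None, "googleapis.com", "always")
            == "spanner.mtls.googleapis.com")

    # Universe domain: the client option beats the env var; the default fills the gap.
    assert InstanceAdminClient._get_universe_domain(None, None) == "googleapis.com"
    assert InstanceAdminClient._get_universe_domain("example.com", "env.example.com") == "example.com"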
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = InstanceAdminClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. 
- - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = InstanceAdminClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the instance admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the InstanceAdminTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. 
The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = InstanceAdminClient._read_environment_variables() - self._client_cert_source = InstanceAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = InstanceAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, InstanceAdminTransport) - if transport_provided: - # transport is a InstanceAdminTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(InstanceAdminTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - InstanceAdminClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[InstanceAdminTransport], Callable[..., InstanceAdminTransport]] = ( - InstanceAdminClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., InstanceAdminTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner.admin.instance_v1.InstanceAdminClient`.", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "credentialsType": None, - } - ) - - def list_instance_configs(self, - request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstanceConfigsPager: - r"""Lists the supported instance configurations for a - given project. - Returns both Google-managed configurations and - user-managed configurations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
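A minimal construction sketch for the initializer above; AnonymousCredentials keeps it offline, and the endpoint/universe properties resolve exactly as wired in `__init__`:

    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient

    client = InstanceAdminClient(
        credentials=AnonymousCredentials(),
        transport="rest",  # a string label, a transport instance, or a callable
        client_options=ClientOptions(universe_domain="googleapis.com"),
    )
    assert client.api_endpoint == "spanner.googleapis.com"
    assert client.universe_domain == "googleapis.com"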
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_list_instance_configs(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]): - The request object. The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - parent (str): - Required. The name of the project for which a list of - supported instance configurations is requested. Values - are of the form ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager: - The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest): - request = spanner_instance_admin.ListInstanceConfigsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instance_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
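The pager wrapping just below is what lets the snippet's `for response in page_result:` loop cross page boundaries transparently; both iteration styles, assuming Application Default Credentials and a real project (IDs are placeholders):

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    # Item-at-a-time: the pager fetches additional pages on demand.
    for config in client.list_instance_configs(parent="projects/my-project"):
        print(config.name)
    # Page-at-a-time, when the raw ListInstanceConfigsResponse is needed.
    for page in client.list_instance_configs(parent="projects/my-project").pages:
        print(len(page.instance_configs))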
- response = pagers.ListInstanceConfigsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_instance_config(self, - request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.InstanceConfig: - r"""Gets information about a particular instance - configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_get_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]): - The request object. The request for - [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - name (str): - Required. The name of the requested instance - configuration. Values are of the form - ``projects//instanceConfigs/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstanceConfig: - A possible configuration for a Cloud - Spanner instance. Configurations define - the geographic placement of nodes and - their replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest): - request = spanner_instance_admin.GetInstanceConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
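The quick-check guard above makes `request` and the flattened fields mutually exclusive; the ValueError fires before any RPC is attempted (a sketch with placeholder names):

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    request = spanner_admin_instance_v1.GetInstanceConfigRequest(
        name="projects/my-project/instanceConfigs/my-config",
    )
    try:
        client.get_instance_config(request=request, name=request.name)  # both set
    except ValueError:
        pass  # "If the `request` argument is set, then none of the individual ..."
    config = client.get_instance_config(request=request)  # request alone is fine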
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_instance_config(self, - request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, - instance_config_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates an instance configuration and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance configuration. The - instance configuration name is assigned by the caller. If the - named instance configuration already exists, - ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. - - Immediately after the request returns: - - - The instance configuration is readable via the API, with all - requested attributes. The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. Its state is ``CREATING``. - - While the operation is pending: - - - Cancelling the operation renders the instance configuration - immediately unreadable via the API. - - Except for deleting the creating resource, all other attempts - to modify the instance configuration are rejected. - - Upon completion of the returned operation: - - - Instances can be created using the instance configuration. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. Its state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and - can be used to track creation of the instance configuration. The - metadata field type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.create`` - permission on the resource - [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_create_instance_config():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.CreateInstanceConfigRequest(
-                    parent="parent_value",
-                    instance_config_id="instance_config_id_value",
-                )
-
-                # Make the request
-                operation = client.create_instance_config(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]):
-                The request object. The request for
-                [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
-            parent (str):
-                Required. The name of the project in which to create the
-                instance configuration. Values are of the form
-                ``projects/<project>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-                Required. The ``InstanceConfig`` proto of the
-                configuration to create. ``instance_config.name`` must
-                be ``<parent>/instanceConfigs/<instance_config_id>``.
-                ``instance_config.base_config`` must be a Google-managed
-                configuration name, e.g. <parent>/instanceConfigs/us-east1,
-                <parent>/instanceConfigs/nam3.
-
-                This corresponds to the ``instance_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            instance_config_id (str):
-                Required. The ID of the instance configuration to
-                create. Valid identifiers are of the form
-                ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and
-                64 characters in length. The ``custom-`` prefix is
-                required to avoid name conflicts with Google-managed
-                configurations.
-
-                This corresponds to the ``instance_config_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
-                define the geographic placement of nodes and their
-                replication.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
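Since `create_instance_config` returns an `operation.Operation` (wrapped below with `CreateInstanceConfigMetadata`), callers usually block on `result()`; a hedged sketch with placeholder names:

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    config = spanner_admin_instance_v1.InstanceConfig(
        name="projects/my-project/instanceConfigs/custom-nam3-extra",
        base_config="projects/my-project/instanceConfigs/nam3",
        display_name="custom nam3 variant",
    )
    op = client.create_instance_config(
        parent="projects/my-project",
        instance_config=config,
        instance_config_id="custom-nam3-extra",
    )
    created = op.result(timeout=300)  # blocks; raises on failure or timeout
    print(created.name)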
- flattened_params = [parent, instance_config, instance_config_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest): - request = spanner_instance_admin.CreateInstanceConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_config is not None: - request.instance_config = instance_config - if instance_config_id is not None: - request.instance_config_id = instance_config_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.InstanceConfig, - metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata, - ) - - # Done; return the response. - return response - - def update_instance_config(self, - request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None, - *, - instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an instance configuration. The returned long-running - operation can be used to track the progress of updating the - instance. If the named instance configuration does not exist, - returns ``NOT_FOUND``. - - Only user-managed configurations can be updated. - - Immediately after the request returns: - - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. - - While the operation is pending: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. - The operation is guaranteed to succeed at undoing all changes, - after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance configuration are - rejected. - - Reading the instance configuration via the API continues to - give the pre-request values. - - Upon completion of the returned operation: - - - Creating instances using the instance configuration uses the - new values. - - The new values of the instance configuration are readable via - the API. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. 
-
-        The returned long-running operation will have a name of the
-        format ``<instance_config_name>/operations/<operation_id>`` and
-        can be used to track the instance configuration modification.
-        The metadata field type is
-        [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
-        The response field type is
-        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
-        if successful.
-
-        Authorization requires ``spanner.instanceConfigs.update``
-        permission on the resource
-        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_update_instance_config():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
-                )
-
-                # Make the request
-                operation = client.update_instance_config(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]):
-                The request object. The request for
-                [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
-            instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-                Required. The user instance configuration to update,
-                which must always include the instance configuration
-                name. Otherwise, only fields mentioned in
-                [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
-                need be included. To prevent conflicts of concurrent
-                updates,
-                [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
-                can be used.
-
-                This corresponds to the ``instance_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                Required. A mask specifying which fields in
-                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
-                should be updated. The field mask must always be
-                specified; this prevents any future fields in
-                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
-                from being erased accidentally by clients that do not
-                know about them. Only display_name and labels can be
-                updated.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
- - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations - define the geographic placement of nodes and their - replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest): - request = spanner_instance_admin.UpdateInstanceConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance_config is not None: - request.instance_config = instance_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance_config.name", request.instance_config.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.InstanceConfig, - metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata, - ) - - # Done; return the response. - return response - - def delete_instance_config(self, - request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the instance configuration. Deletion is only allowed - when no instances are using the configuration. If any instances - are using the configuration, returns ``FAILED_PRECONDITION``. - - Only user-managed configurations can be deleted. - - Authorization requires ``spanner.instanceConfigs.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
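For `update_instance_config` above, the required FieldMask is spelled out explicitly; since only `display_name` and `labels` are updatable, a typical call looks like this (placeholder names):

    from google.protobuf import field_mask_pb2
    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    config = spanner_admin_instance_v1.InstanceConfig(
        name="projects/my-project/instanceConfigs/custom-nam3-extra",
        display_name="renamed custom config",
    )
    op = client.update_instance_config(
        instance_config=config,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    op.result()  # metadata is typed as UpdateInstanceConfigMetadata, per the wrapping above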
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_delete_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( - name="name_value", - ) - - # Make the request - client.delete_instance_config(request=request) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]): - The request object. The request for - [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. - name (str): - Required. The name of the instance configuration to be - deleted. Values are of the form - ``projects//instanceConfigs/`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest): - request = spanner_instance_admin.DeleteInstanceConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_instance_config_operations(self, - request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstanceConfigOperationsPager: - r"""Lists the user-managed instance configuration long-running - operations in the given project. An instance configuration - operation has a name of the form - ``projects//instanceConfigs//operations/``. 
- The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_list_instance_config_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_config_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]): - The request object. The request for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - parent (str): - Required. The project of the instance configuration - operations. Values are of the form - ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager: - The response for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest): - request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
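Each item this pager yields is a raw `operations_pb2.Operation`, so the metadata arrives as a packed `Any`; the `metadata.type_url` mentioned above is how callers tell the operation types apart (a sketch with a placeholder project):

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    for op in client.list_instance_config_operations(parent="projects/my-project"):
        print(op.name, op.done, op.metadata.type_url)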
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_instance_config_operations]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListInstanceConfigOperationsPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def list_instances(self,
- request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListInstancesPager:
- r"""Lists all instances in the given project.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- def sample_list_instances():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.ListInstancesRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_instances(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]):
- The request object. The request for
- [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
- parent (str):
- Required. The name of the project for which a list of
- instances is requested. Values are of the form
- ``projects/<project>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager:
- The response for
- [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.ListInstancesRequest):
- request = spanner_instance_admin.ListInstancesRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_instances]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListInstancesPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def list_instance_partitions(self,
- request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListInstancePartitionsPager:
- r"""Lists all instance partitions for the given instance.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- def sample_list_instance_partitions():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_instance_partitions(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]):
- The request object. The request for
- [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
- parent (str):
- Required. The instance whose instance partitions should
- be listed. Values are of the form
- ``projects/<project>/instances/<instance>``.
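Besides the element-by-element iteration shown in the generated samples, the pagers returned by these ``list_*`` methods expose page-level iteration through their ``pages`` attribute. A minimal sketch; the parent value is a placeholder:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    pager = client.list_instances(parent="projects/my-project")

    # Walk page by page instead of element by element; each page is a
    # ListInstancesResponse whose `instances` field holds that page's results.
    for page in pager.pages:
        for instance in page.instances:
            print(instance.name)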
Use - ``{instance} = '-'`` to list instance partitions for all - Instances in a project, e.g., - ``projects/myproject/instances/-``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager: - The response for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest): - request = spanner_instance_admin.ListInstancePartitionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instance_partitions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInstancePartitionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_instance(self, - request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.Instance: - r"""Gets information about a particular instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- def sample_get_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.GetInstanceRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_instance(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]):
- The request object. The request for
- [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance].
- name (str):
- Required. The name of the requested instance. Values are
- of the form ``projects/<project>/instances/<instance>``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_instance_v1.types.Instance:
- An isolated set of Cloud Spanner
- resources on which databases can be
- hosted.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.GetInstanceRequest):
- request = spanner_instance_admin.GetInstanceRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_instance]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def create_instance(self,
- request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- instance_id: Optional[str] = None,
- instance: Optional[spanner_instance_admin.Instance] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation.Operation:
- r"""Creates an instance and begins preparing it to begin serving.
- The returned long-running operation can be used to track the
- progress of preparing the new instance. The instance name is
- assigned by the caller. If the named instance already exists,
- ``CreateInstance`` returns ``ALREADY_EXISTS``.
-
- Immediately upon completion of this request:
-
- - The instance is readable via the API, with all requested
- attributes but no allocated resources. Its state is
- ``CREATING``.
-
- Until completion of the returned operation:
-
- - Cancelling the operation renders the instance immediately
- unreadable via the API.
- - The instance can be deleted.
- - All other attempts to modify the instance are rejected.
-
- Upon completion of the returned operation:
-
- - Billing for all successfully-allocated resources begins (some
- types may have lower than the requested levels).
- - Databases can be created in the instance.
- - The instance's allocated resource levels are readable via the
- API.
- - The instance's state becomes ``READY``.
-
- The returned long-running operation will have a name of the
- format ``<instance_name>/operations/<operation_id>`` and can be
- used to track creation of the instance. The metadata field type
- is
- [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
- The response field type is
- [Instance][google.spanner.admin.instance.v1.Instance], if
- successful.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- def sample_create_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- instance = spanner_admin_instance_v1.Instance()
- instance.name = "name_value"
- instance.config = "config_value"
- instance.display_name = "display_name_value"
-
- request = spanner_admin_instance_v1.CreateInstanceRequest(
- parent="parent_value",
- instance_id="instance_id_value",
- instance=instance,
- )
-
- # Make the request
- operation = client.create_instance(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]):
- The request object. The request for
- [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].
- parent (str):
- Required. The name of the project in which to create the
- instance. Values are of the form ``projects/<project>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- instance_id (str):
- Required.
The ID of the instance to create. Valid
- identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``
- and must be between 2 and 64 characters in length.
-
- This corresponds to the ``instance_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- instance (google.cloud.spanner_admin_instance_v1.types.Instance):
- Required. The instance to create. The name may be
- omitted, but if specified must be
- ``<parent>/instances/<instance_id>``.
-
- This corresponds to the ``instance`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be
- :class:`google.cloud.spanner_admin_instance_v1.types.Instance`
- An isolated set of Cloud Spanner resources on which
- databases can be hosted.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, instance_id, instance]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.CreateInstanceRequest):
- request = spanner_instance_admin.CreateInstanceRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if instance_id is not None:
- request.instance_id = instance_id
- if instance is not None:
- request.instance = instance
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.create_instance]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- spanner_instance_admin.Instance,
- metadata_type=spanner_instance_admin.CreateInstanceMetadata,
- )
-
- # Done; return the response.
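The operation future returned here also exposes progress inspection beyond the blocking ``result()`` call in the generated sample. A minimal sketch using the flattened arguments; all resource names and values are illustrative assumptions:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    instance = spanner_admin_instance_v1.Instance(
        name="projects/my-project/instances/my-instance",
        # Placeholder config name; use a config from list_instance_configs.
        config="projects/my-project/instanceConfigs/regional-us-central1",
        display_name="My Instance",
        node_count=1,
    )

    operation = client.create_instance(
        parent="projects/my-project",
        instance_id="my-instance",
        instance=instance,
    )

    # While the LRO runs, its metadata is a CreateInstanceMetadata message.
    print(type(operation.metadata).__name__)

    # Block until the instance reaches READY, with an explicit deadline.
    instance = operation.result(timeout=300)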
- return response
-
- def update_instance(self,
- request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None,
- *,
- instance: Optional[spanner_instance_admin.Instance] = None,
- field_mask: Optional[field_mask_pb2.FieldMask] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation.Operation:
- r"""Updates an instance, and begins allocating or releasing
- resources as requested. The returned long-running operation can
- be used to track the progress of updating the instance. If the
- named instance does not exist, returns ``NOT_FOUND``.
-
- Immediately upon completion of this request:
-
- - For resource types for which a decrease in the instance's
- allocation has been requested, billing is based on the
- newly-requested level.
-
- Until completion of the returned operation:
-
- - Cancelling the operation sets its metadata's
- [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
- and begins restoring resources to their pre-request values.
- The operation is guaranteed to succeed at undoing all resource
- changes, after which point it terminates with a ``CANCELLED``
- status.
- - All other attempts to modify the instance are rejected.
- - Reading the instance via the API continues to give the
- pre-request resource levels.
-
- Upon completion of the returned operation:
-
- - Billing begins for all successfully-allocated resources (some
- types may have lower than the requested levels).
- - All newly-reserved resources are available for serving the
- instance's tables.
- - The instance's new resource levels are readable via the API.
-
- The returned long-running operation will have a name of the
- format ``<instance_name>/operations/<operation_id>`` and can be
- used to track the instance modification. The metadata field type
- is
- [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
- The response field type is
- [Instance][google.spanner.admin.instance.v1.Instance], if
- successful.
-
- Authorization requires ``spanner.instances.update`` permission
- on the resource
- [name][google.spanner.admin.instance.v1.Instance.name].
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- def sample_update_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- instance = spanner_admin_instance_v1.Instance()
- instance.name = "name_value"
- instance.config = "config_value"
- instance.display_name = "display_name_value"
-
- request = spanner_admin_instance_v1.UpdateInstanceRequest(
- instance=instance,
- )
-
- # Make the request
- operation = client.update_instance(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]):
- The request object.
The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to update, which must always - include the instance name. Otherwise, only fields - mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] - should be updated. The field mask must always be - specified; this prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): - request = spanner_instance_admin.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
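The ``field_mask`` argument is what makes these updates safe against clobbering fields the caller does not know about. A minimal sketch of a masked update; the instance name and display name are placeholders:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1
    from google.protobuf import field_mask_pb2

    client = spanner_admin_instance_v1.InstanceAdminClient()

    instance = spanner_admin_instance_v1.Instance(
        name="projects/my-project/instances/my-instance",
        display_name="Renamed Instance",
    )

    # Only the fields named in the mask are written; everything else on the
    # server-side Instance is left untouched.
    operation = client.update_instance(
        instance=instance,
        field_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    response = operation.result()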
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- spanner_instance_admin.Instance,
- metadata_type=spanner_instance_admin.UpdateInstanceMetadata,
- )
-
- # Done; return the response.
- return response
-
- def delete_instance(self,
- request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Deletes an instance.
-
- Immediately upon completion of the request:
-
- - Billing ceases for all of the instance's reserved resources.
-
- Soon afterward:
-
- - The instance and *all of its databases* immediately and
- irrevocably disappear from the API. All data in the databases
- is permanently deleted.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- def sample_delete_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.DeleteInstanceRequest(
- name="name_value",
- )
-
- # Make the request
- client.delete_instance(request=request)
-
- Args:
- request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]):
- The request object. The request for
- [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].
- name (str):
- Required. The name of the instance to be deleted. Values
- are of the form
- ``projects/<project>/instances/<instance>``
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest):
- request = spanner_instance_admin.DeleteInstanceRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.delete_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on an instance resource. Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
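Since the returned ``Policy`` carries an ``etag``, the idiomatic pattern with these two methods is a read-modify-write cycle: the etag read back by ``get_iam_policy`` lets ``set_iam_policy`` reject writes based on a stale read. A minimal sketch; the resource name, role, and member are placeholders:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1
    from google.iam.v1 import iam_policy_pb2

    client = spanner_admin_instance_v1.InstanceAdminClient()
    resource = "projects/my-project/instances/my-instance"

    # Read the current policy, including its etag.
    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )

    # Modify it locally; the etag travels along inside `policy`.
    policy.bindings.add(
        role="roles/spanner.databaseReader",
        members=["user:alice@example.com"],
    )

    # Write the modified policy back.
    client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )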
- - **JSON example:** - - :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - permissions: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
- from google.iam.v1 import iam_policy_pb2  # type: ignore
-
- def sample_test_iam_permissions():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- request = iam_policy_pb2.TestIamPermissionsRequest(
- resource="resource_value",
- permissions=['permissions_value1', 'permissions_value2'],
- )
-
- # Make the request
- response = client.test_iam_permissions(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
- The request object. Request message for ``TestIamPermissions`` method.
- resource (str):
- REQUIRED: The resource for which the
- policy detail is being requested. See
- the operation documentation for the
- appropriate value for this field.
-
- This corresponds to the ``resource`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- permissions (MutableSequence[str]):
- The set of permissions to check for the ``resource``.
- Permissions with wildcards (such as '*' or 'storage.*')
- are not allowed. For more information see `IAM
- Overview <https://cloud.google.com/iam/overview#permissions>`__.
-
- This corresponds to the ``permissions`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
- Response message for TestIamPermissions method.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [resource, permissions]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- if isinstance(request, dict):
- # - The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- request = iam_policy_pb2.TestIamPermissionsRequest(**request)
- elif not request:
- # Null request, just make one.
- request = iam_policy_pb2.TestIamPermissionsRequest()
- if resource is not None:
- request.resource = resource
- if permissions:
- request.permissions.extend(permissions)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("resource", request.resource),
- )),
- )
-
- # Validate the universe domain.
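As the docstring notes, the response does not raise on missing permissions: it echoes back only the subset of the requested permissions that the caller actually holds. A minimal sketch of checking that subset; the resource name and permissions are placeholders:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1
    from google.iam.v1 import iam_policy_pb2

    client = spanner_admin_instance_v1.InstanceAdminClient()

    response = client.test_iam_permissions(
        request=iam_policy_pb2.TestIamPermissionsRequest(
            resource="projects/my-project/instances/my-instance",
            permissions=[
                "spanner.databases.create",
                "spanner.databases.drop",
            ],
        )
    )

    # Only the permissions the caller holds come back.
    allowed = set(response.permissions)
    print("spanner.databases.create" in allowed)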
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_instance_partition(self, - request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.InstancePartition: - r"""Gets information about a particular instance - partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]): - The request object. The request for - [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. - name (str): - Required. The name of the requested instance partition. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstancePartition: - An isolated set of Cloud Spanner - resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest): - request = spanner_instance_admin.GetInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_instance_partition]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def create_instance_partition(self,
- request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- instance_partition: Optional[spanner_instance_admin.InstancePartition] = None,
- instance_partition_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation.Operation:
- r"""Creates an instance partition and begins preparing it to be
- used. The returned long-running operation can be used to track
- the progress of preparing the new instance partition. The
- instance partition name is assigned by the caller. If the named
- instance partition already exists, ``CreateInstancePartition``
- returns ``ALREADY_EXISTS``.
-
- Immediately upon completion of this request:
-
- - The instance partition is readable via the API, with all
- requested attributes but no allocated resources. Its state is
- ``CREATING``.
-
- Until completion of the returned operation:
-
- - Cancelling the operation renders the instance partition
- immediately unreadable via the API.
- - The instance partition can be deleted.
- - All other attempts to modify the instance partition are
- rejected.
-
- Upon completion of the returned operation:
-
- - Billing for all successfully-allocated resources begins (some
- types may have lower than the requested levels).
- - Databases can start using this instance partition.
- - The instance partition's allocated resource levels are
- readable via the API.
- - The instance partition's state becomes ``READY``.
-
- The returned long-running operation will have a name of the
- format ``<instance_partition_name>/operations/<operation_id>``
- and can be used to track creation of the instance partition. The
- metadata field type is
- [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata].
- The response field type is
- [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
- if successful.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- def sample_create_instance_partition():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminClient()
-
- # Initialize request argument(s)
- instance_partition = spanner_admin_instance_v1.InstancePartition()
- instance_partition.node_count = 1070
- instance_partition.name = "name_value"
- instance_partition.config = "config_value"
- instance_partition.display_name = "display_name_value"
-
- request = spanner_admin_instance_v1.CreateInstancePartitionRequest(
- parent="parent_value",
- instance_partition_id="instance_partition_id_value",
- instance_partition=instance_partition,
- )
-
- # Make the request
- operation = client.create_instance_partition(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]):
- The request object. The request for
- [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition].
- parent (str):
- Required. The name of the instance in which to create
- the instance partition. Values are of the form
- ``projects/<project>/instances/<instance>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition):
- Required. The instance partition to create. The
- instance_partition.name may be omitted, but if specified
- must be
- ``<parent>/instancePartitions/<instance_partition_id>``.
-
- This corresponds to the ``instance_partition`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- instance_partition_id (str):
- Required. The ID of the instance partition to create.
- Valid identifiers are of the form
- ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64
- characters in length.
-
- This corresponds to the ``instance_partition_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define
- placements on.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, instance_partition, instance_partition_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): - request = spanner_instance_admin.CreateInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_partition is not None: - request.instance_partition = instance_partition - if instance_partition_id is not None: - request.instance_partition_id = instance_partition_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, - ) - - # Done; return the response. - return response - - def delete_instance_partition(self, - request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - client.delete_instance_partition(request=request) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]): - The request object. 
The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - name (str): - Required. The name of the instance partition to be - deleted. Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): - request = spanner_instance_admin.DeleteInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_instance_partition(self, - request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, - *, - instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, - field_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. 
- The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. - - The returned long-running operation will have a name of the - format ``<instance_partition_name>/operations/<operation_id>`` - and can be used to track the instance partition modification. - The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_update_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( - instance_partition=instance_partition, - ) - - # Make the request - operation = client.update_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]): - The request object. The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - Required. The instance partition to update, which must - always include the instance partition name. Otherwise, - only fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] - need be included. - - This corresponds to the ``instance_partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - should be updated. 
The field mask must always be - specified; this prevents any future fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance_partition, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest): - request = spanner_instance_admin.UpdateInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance_partition is not None: - request.instance_partition = instance_partition - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance_partition.name", request.instance_partition.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata, - ) - - # Done; return the response. - return response - - def list_instance_partition_operations(self, - request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancePartitionOperationsPager: - r"""Lists instance partition long-running operations in the given - instance. 
An instance partition operation has a name of the form - ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_list_instance_partition_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partition_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]): - The request object. The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - parent (str): - Required. The parent instance of the instance partition - operations. Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager: - The response for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
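- # Either ``request`` or the flattened ``parent`` field may be supplied,
- # never both; mixing them raises the ValueError below.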
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest): - request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instance_partition_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInstancePartitionOperationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def move_instance(self, - request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Moves an instance to the target instance configuration. You can - use the returned long-running operation to track the progress of - moving the instance. - - ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance - meets any of the following criteria: - - - Is undergoing a move to a different instance configuration - - Has backups - - Has an ongoing update - - Contains any CMEK-enabled databases - - Is a free trial instance - - While the operation is pending: - - - All other attempts to modify the instance, including changes - to its compute capacity, are rejected. - - - The following database and backup admin operations are - rejected: - - - ``DatabaseAdmin.CreateDatabase`` - - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if - default_leader is specified in the request.) - - ``DatabaseAdmin.RestoreDatabase`` - - ``DatabaseAdmin.CreateBackup`` - - ``DatabaseAdmin.CopyBackup`` - - - Both the source and target instance configurations are subject - to hourly compute and storage charges. - - - The instance might experience higher read-write latencies and - a higher transaction abort rate. However, moving an instance - doesn't cause any downtime. - - The returned long-running operation has a name of the format - ``<instance_name>/operations/<operation_id>`` and can be used to - track the move instance operation. The metadata field type is - [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. 
Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. - Cancellation is not immediate because it involves moving any - data previously moved to the target instance configuration back - to the original instance configuration. You can use this - operation to track the progress of the cancellation. Upon - successful completion of the cancellation, the operation - terminates with ``CANCELLED`` status. - - If not cancelled, upon completion of the returned operation: - - - The instance successfully moves to the target instance - configuration. - - You are billed for compute and storage in target instance - configuration. - - Authorization requires the ``spanner.instances.update`` - permission on the resource - [instance][google.spanner.admin.instance.v1.Instance]. - - For more details, see `Move an - instance <https://cloud.google.com/spanner/docs/move-instance>`__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_move_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.MoveInstanceRequest( - name="name_value", - target_config="target_config_value", - ) - - # Make the request - operation = client.move_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]): - The request object. The request for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.MoveInstanceRequest): - request = spanner_instance_admin.MoveInstanceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.move_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
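- # The check below verifies that the client's configured universe domain
- # matches the domain reported by its credentials; on a mismatch the call
- # fails before any request is sent.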
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.MoveInstanceResponse, - metadata_type=spanner_instance_admin.MoveInstanceMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "InstanceAdminClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
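- # ``DeleteOperation`` returns ``google.protobuf.Empty``, so there is no
- # response object to hand back to the caller.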
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "InstanceAdminClient", -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py deleted file mode 100644 index bde9847337..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py +++ /dev/null @@ -1,723 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.longrunning import operations_pb2 # type: ignore - - -class ListInstanceConfigsPager: - """A pager for iterating through ``list_instance_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instance_configs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstanceConfigs`` requests and continue to iterate - through the ``instance_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstanceConfigsResponse], - request: spanner_instance_admin.ListInstanceConfigsRequest, - response: spanner_instance_admin.ListInstanceConfigsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: - for page in self.pages: - yield from page.instance_configs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstanceConfigsAsyncPager: - """A pager for iterating through ``list_instance_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instance_configs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstanceConfigs`` requests and continue to iterate - through the ``instance_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]], - request: spanner_instance_admin.ListInstanceConfigsRequest, - response: spanner_instance_admin.ListInstanceConfigsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: - async def async_generator(): - async for page in self.pages: - for response in page.instance_configs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstanceConfigOperationsPager: - """A pager for iterating through ``list_instance_config_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstanceConfigOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstanceConfigOperationsResponse], - request: spanner_instance_admin.ListInstanceConfigOperationsRequest, - response: spanner_instance_admin.ListInstanceConfigOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstanceConfigOperationsAsyncPager: - """A pager for iterating through ``list_instance_config_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstanceConfigOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]], - request: spanner_instance_admin.ListInstanceConfigOperationsRequest, - response: spanner_instance_admin.ListInstanceConfigOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancesPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstancesResponse], - request: spanner_instance_admin.ListInstancesRequest, - response: spanner_instance_admin.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: - for page in self.pages: - yield from page.instances - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancesAsyncPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstancesResponse]], - request: spanner_instance_admin.ListInstancesRequest, - response: spanner_instance_admin.ListInstancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: - async def async_generator(): - async for page in self.pages: - for response in page.instances: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionsPager: - """A pager for iterating through ``list_instance_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instance_partitions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstancePartitions`` requests and continue to iterate - through the ``instance_partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstancePartitionsResponse], - request: spanner_instance_admin.ListInstancePartitionsRequest, - response: spanner_instance_admin.ListInstancePartitionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.InstancePartition]: - for page in self.pages: - yield from page.instance_partitions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionsAsyncPager: - """A pager for iterating through ``list_instance_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instance_partitions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstancePartitions`` requests and continue to iterate - through the ``instance_partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]], - request: spanner_instance_admin.ListInstancePartitionsRequest, - response: spanner_instance_admin.ListInstancePartitionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstancePartition]: - async def async_generator(): - async for page in self.pages: - for response in page.instance_partitions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionOperationsPager: - """A pager for iterating through ``list_instance_partition_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstancePartitionOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstancePartitionOperationsResponse], - request: spanner_instance_admin.ListInstancePartitionOperationsRequest, - response: spanner_instance_admin.ListInstancePartitionOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionOperationsAsyncPager: - """A pager for iterating through ``list_instance_partition_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstancePartitionOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]], - request: spanner_instance_admin.ListInstancePartitionOperationsRequest, - response: spanner_instance_admin.ListInstancePartitionOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst deleted file mode 100644 index 762ac0c765..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`InstanceAdminTransport` is the ABC for all transports. -- public child `InstanceAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `InstanceAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseInstanceAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `InstanceAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py deleted file mode 100644 index 1892e6a551..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import InstanceAdminTransport -from .grpc import InstanceAdminGrpcTransport -from .grpc_asyncio import InstanceAdminGrpcAsyncIOTransport -from .rest import InstanceAdminRestTransport -from .rest import InstanceAdminRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] -_transport_registry['grpc'] = InstanceAdminGrpcTransport -_transport_registry['grpc_asyncio'] = InstanceAdminGrpcAsyncIOTransport -_transport_registry['rest'] = InstanceAdminRestTransport - -__all__ = ( - 'InstanceAdminTransport', - 'InstanceAdminGrpcTransport', - 'InstanceAdminGrpcAsyncIOTransport', - 'InstanceAdminRestTransport', - 'InstanceAdminRestInterceptor', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py deleted file mode 100644 index cac35e8288..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ /dev/null @@ -1,567 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
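The ``_transport_registry`` above is the entire transport-selection mechanism: a label maps to a class, which the client then instantiates with host, credentials, and scopes. A sketch of the equivalent lookup (``pick_transport`` is a hypothetical helper; the import path is the package's own)::

    from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports

    def pick_transport(label: str = "grpc"):
        # Mirrors the registry above: one of "grpc", "grpc_asyncio",
        # or "rest" selects the transport class to construct.
        registry = {
            "grpc": transports.InstanceAdminGrpcTransport,
            "grpc_asyncio": transports.InstanceAdminGrpcAsyncIOTransport,
            "rest": transports.InstanceAdminRestTransport,
        }
        return registry[label]

    transport_cls = pick_transport("grpc")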
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class InstanceAdminTransport(abc.ABC): - """Abstract transport class for InstanceAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
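# Resolution order implemented below: a ``credentials_file`` (deprecated)
# is loaded with google.auth.load_credentials_from_file(); otherwise any
# explicit ``credentials`` object is used as given; otherwise Application
# Default Credentials are resolved via google.auth.default(). Supplying
# both ``credentials`` and ``credentials_file`` is rejected up front with
# DuplicateCredentialArgs.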
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_instance_configs: gapic_v1.method.wrap_method( - self.list_instance_configs, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_instance_config: gapic_v1.method.wrap_method( - self.get_instance_config, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_instance_config: gapic_v1.method.wrap_method( - self.create_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_config: gapic_v1.method.wrap_method( - self.update_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_config: gapic_v1.method.wrap_method( - self.delete_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_config_operations: gapic_v1.method.wrap_method( - self.list_instance_config_operations, - default_timeout=None, - client_info=client_info, - ), - self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_instance_partitions: gapic_v1.method.wrap_method( - self.list_instance_partitions, - default_timeout=None, - client_info=client_info, - ), - self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), 
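# The Retry policies above implement exponential backoff: the first retry
# waits ~1.0s, each later delay grows by a factor of 1.3 up to a 32.0s
# ceiling, and the call is abandoned once the 3600.0s deadline elapses.
# Only DeadlineExceeded and ServiceUnavailable are retried; any other
# error surfaces immediately.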
- self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.get_instance_partition: gapic_v1.method.wrap_method( - self.get_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.create_instance_partition: gapic_v1.method.wrap_method( - self.create_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_partition: gapic_v1.method.wrap_method( - self.delete_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_partition: gapic_v1.method.wrap_method( - self.update_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_partition_operations: gapic_v1.method.wrap_method( - self.list_instance_partition_operations, - default_timeout=None, - client_info=client_info, - ), - self.move_instance: gapic_v1.method.wrap_method( - self.move_instance, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - Union[ - spanner_instance_admin.ListInstanceConfigsResponse, - Awaitable[spanner_instance_admin.ListInstanceConfigsResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - Union[ - spanner_instance_admin.InstanceConfig, - Awaitable[spanner_instance_admin.InstanceConfig] - ]]: - raise NotImplementedError() - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_instance_config(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceConfigRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_instance_config(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceConfigRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_instance_config_operations(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigOperationsRequest], - Union[ - spanner_instance_admin.ListInstanceConfigOperationsResponse, - Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - Union[ - spanner_instance_admin.ListInstancesResponse, - Awaitable[spanner_instance_admin.ListInstancesResponse] - ]]: - raise NotImplementedError() - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - Union[ - spanner_instance_admin.ListInstancePartitionsResponse, - Awaitable[spanner_instance_admin.ListInstancePartitionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - Union[ - spanner_instance_admin.Instance, - Awaitable[spanner_instance_admin.Instance] - ]]: - raise NotImplementedError() - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - 
[iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - Union[ - spanner_instance_admin.InstancePartition, - Awaitable[spanner_instance_admin.InstancePartition] - ]]: - raise NotImplementedError() - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - Union[ - spanner_instance_admin.ListInstancePartitionOperationsResponse, - Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'InstanceAdminTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py deleted file mode 100644 index 0319e519cc..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ /dev/null @@ -1,1359 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class InstanceAdminGrpcTransport(InstanceAdminTransport):
-    """gRPC backend transport for InstanceAdmin.
- - Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
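# Two mutual-TLS paths converge here: with the deprecated
# ``api_mtls_endpoint``, client certificates come from the (also
# deprecated) ``client_cert_source`` callback or, failing that, from
# SslCredentials() application-default client SSL credentials; without
# it, ``client_cert_source_for_mtls`` is honored only when no explicit
# ``ssl_channel_credentials`` were supplied.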
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - spanner_instance_admin.ListInstanceConfigsResponse]: - r"""Return a callable for the list instance configs method over gRPC. - - Lists the supported instance configurations for a - given project. - Returns both Google-managed configurations and - user-managed configurations. - - Returns: - Callable[[~.ListInstanceConfigsRequest], - ~.ListInstanceConfigsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_configs' not in self._stubs: - self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', - request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, - ) - return self._stubs['list_instance_configs'] - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - spanner_instance_admin.InstanceConfig]: - r"""Return a callable for the get instance config method over gRPC. - - Gets information about a particular instance - configuration. - - Returns: - Callable[[~.GetInstanceConfigRequest], - ~.InstanceConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_config' not in self._stubs: - self._stubs['get_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', - request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, - response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, - ) - return self._stubs['get_instance_config'] - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - operations_pb2.Operation]: - r"""Return a callable for the create instance config method over gRPC. - - Creates an instance configuration and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance configuration. The - instance configuration name is assigned by the caller. If the - named instance configuration already exists, - ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. - - Immediately after the request returns: - - - The instance configuration is readable via the API, with all - requested attributes. 
The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field is set to true. Its state is ``CREATING``.
-
-        While the operation is pending:
-
-        - Cancelling the operation renders the instance configuration
-          immediately unreadable via the API.
-        - Except for deleting the creating resource, all other attempts
-          to modify the instance configuration are rejected.
-
-        Upon completion of the returned operation:
-
-        - Instances can be created using the instance configuration.
-        - The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field becomes false. Its state becomes ``READY``.
-
-        The returned long-running operation will have a name of the
-        format ``<instance_config_name>/operations/<operation_id>`` and
-        can be used to track creation of the instance configuration. The
-        metadata field type is
-        [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
-        The response field type is
-        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
-        if successful.
-
-        Authorization requires ``spanner.instanceConfigs.create``
-        permission on the resource
-        [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
-
-        Returns:
-            Callable[[~.CreateInstanceConfigRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_instance_config' not in self._stubs:
-            self._stubs['create_instance_config'] = self._logged_channel.unary_unary(
-                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig',
-                request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['create_instance_config']
-
-    @property
-    def update_instance_config(self) -> Callable[
-            [spanner_instance_admin.UpdateInstanceConfigRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the update instance config method over gRPC.
-
-        Updates an instance configuration. The returned long-running
-        operation can be used to track the progress of updating the
-        instance. If the named instance configuration does not exist,
-        returns ``NOT_FOUND``.
-
-        Only user-managed configurations can be updated.
-
-        Immediately after the request returns:
-
-        - The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field is set to true.
-
-        While the operation is pending:
-
-        - Cancelling the operation sets its metadata's
-          [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
-          The operation is guaranteed to succeed at undoing all changes,
-          after which point it terminates with a ``CANCELLED`` status.
-        - All other attempts to modify the instance configuration are
-          rejected.
-        - Reading the instance configuration via the API continues to
-          give the pre-request values.
-
-        Upon completion of the returned operation:
-
-        - Creating instances using the instance configuration uses the
-          new values.
-        - The new values of the instance configuration are readable via
-          the API.
-        - The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field becomes false.
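A minimal sketch of driving this long-running operation from the generated client surface (resource names are placeholders; ``operation.result()`` blocks until the server reports the operation done, then unpacks the response message)::

    from google.cloud import spanner_admin_instance_v1
    from google.protobuf import field_mask_pb2

    client = spanner_admin_instance_v1.InstanceAdminClient()

    operation = client.update_instance_config(
        instance_config=spanner_admin_instance_v1.InstanceConfig(
            name="projects/my-project/instanceConfigs/custom-config",  # placeholder
            display_name="updated display name",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    # Raises on failure or cancellation; returns the InstanceConfig
    # response once reconciliation has finished.
    config = operation.result(timeout=300)
    print(config.reconciling)  # False once the update has settled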
-
-        The returned long-running operation will have a name of the
-        format ``<instance_config_name>/operations/<operation_id>`` and
-        can be used to track the instance configuration modification.
-        The metadata field type is
-        [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
-        The response field type is
-        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
-        if successful.
-
-        Authorization requires ``spanner.instanceConfigs.update``
-        permission on the resource
-        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
-
-        Returns:
-            Callable[[~.UpdateInstanceConfigRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'update_instance_config' not in self._stubs:
-            self._stubs['update_instance_config'] = self._logged_channel.unary_unary(
-                '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig',
-                request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['update_instance_config']
-
-    @property
-    def delete_instance_config(self) -> Callable[
-            [spanner_instance_admin.DeleteInstanceConfigRequest],
-            empty_pb2.Empty]:
-        r"""Return a callable for the delete instance config method over gRPC.
-
-        Deletes the instance configuration. Deletion is only allowed
-        when no instances are using the configuration. If any instances
-        are using the configuration, returns ``FAILED_PRECONDITION``.
-
-        Only user-managed configurations can be deleted.
-
-        Authorization requires ``spanner.instanceConfigs.delete``
-        permission on the resource
-        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
-
-        Returns:
-            Callable[[~.DeleteInstanceConfigRequest],
-                    ~.Empty]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'delete_instance_config' not in self._stubs:
-            self._stubs['delete_instance_config'] = self._logged_channel.unary_unary(
-                '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig',
-                request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize,
-                response_deserializer=empty_pb2.Empty.FromString,
-            )
-        return self._stubs['delete_instance_config']
-
-    @property
-    def list_instance_config_operations(self) -> Callable[
-            [spanner_instance_admin.ListInstanceConfigOperationsRequest],
-            spanner_instance_admin.ListInstanceConfigOperationsResponse]:
-        r"""Return a callable for the list instance config
-        operations method over gRPC.
-
-        Lists the user-managed instance configuration long-running
-        operations in the given project. An instance configuration
-        operation has a name of the form
-        ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
-        The long-running operation metadata field type
-        ``metadata.type_url`` describes the type of the metadata.
-        Operations returned include those that have
-        completed/failed/canceled within the last 7 days, and pending
-        operations. Operations returned are ordered by
-        ``operation.metadata.value.start_time`` in descending order
-        starting from the most recently started operation.
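For example, the filtering described here can be exercised through the client like so (a sketch; the project name is a placeholder and the filter uses the metadata-type syntax from the API documentation)::

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    # Restrict the listing to CreateInstanceConfig operations; results
    # arrive newest-first, as described above.
    pager = client.list_instance_config_operations(
        request={
            "parent": "projects/my-project",  # placeholder
            "filter": "(metadata.@type=type.googleapis.com/"
                      "google.spanner.admin.instance.v1.CreateInstanceConfigMetadata)",
        }
    )
    for op in pager:
        print(op.name, op.done)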
- - Returns: - Callable[[~.ListInstanceConfigOperationsRequest], - ~.ListInstanceConfigOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_config_operations' not in self._stubs: - self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', - request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, - ) - return self._stubs['list_instance_config_operations'] - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - spanner_instance_admin.ListInstancesResponse]: - r"""Return a callable for the list instances method over gRPC. - - Lists all instances in the given project. - - Returns: - Callable[[~.ListInstancesRequest], - ~.ListInstancesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', - request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - spanner_instance_admin.ListInstancePartitionsResponse]: - r"""Return a callable for the list instance partitions method over gRPC. - - Lists all instance partitions for the given instance. - - Returns: - Callable[[~.ListInstancePartitionsRequest], - ~.ListInstancePartitionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partitions' not in self._stubs: - self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', - request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, - ) - return self._stubs['list_instance_partitions'] - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - spanner_instance_admin.Instance]: - r"""Return a callable for the get instance method over gRPC. - - Gets information about a particular instance. - - Returns: - Callable[[~.GetInstanceRequest], - ~.Instance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_instance' not in self._stubs:
-            self._stubs['get_instance'] = self._logged_channel.unary_unary(
-                '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance',
-                request_serializer=spanner_instance_admin.GetInstanceRequest.serialize,
-                response_deserializer=spanner_instance_admin.Instance.deserialize,
-            )
-        return self._stubs['get_instance']
-
-    @property
-    def create_instance(self) -> Callable[
-            [spanner_instance_admin.CreateInstanceRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the create instance method over gRPC.
-
-        Creates an instance and begins preparing it to begin serving.
-        The returned long-running operation can be used to track the
-        progress of preparing the new instance. The instance name is
-        assigned by the caller. If the named instance already exists,
-        ``CreateInstance`` returns ``ALREADY_EXISTS``.
-
-        Immediately upon completion of this request:
-
-        - The instance is readable via the API, with all requested
-          attributes but no allocated resources. Its state is
-          ``CREATING``.
-
-        Until completion of the returned operation:
-
-        - Cancelling the operation renders the instance immediately
-          unreadable via the API.
-        - The instance can be deleted.
-        - All other attempts to modify the instance are rejected.
-
-        Upon completion of the returned operation:
-
-        - Billing for all successfully-allocated resources begins (some
-          types may have lower than the requested levels).
-        - Databases can be created in the instance.
-        - The instance's allocated resource levels are readable via the
-          API.
-        - The instance's state becomes ``READY``.
-
-        The returned long-running operation will have a name of the
-        format ``<instance_name>/operations/<operation_id>`` and can be
-        used to track creation of the instance. The metadata field type
-        is
-        [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
-        The response field type is
-        [Instance][google.spanner.admin.instance.v1.Instance], if
-        successful.
-
-        Returns:
-            Callable[[~.CreateInstanceRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_instance' not in self._stubs:
-            self._stubs['create_instance'] = self._logged_channel.unary_unary(
-                '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance',
-                request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['create_instance']
-
-    @property
-    def update_instance(self) -> Callable[
-            [spanner_instance_admin.UpdateInstanceRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the update instance method over gRPC.
-
-        Updates an instance, and begins allocating or releasing
-        resources as requested. The returned long-running operation can
-        be used to track the progress of updating the instance. If the
-        named instance does not exist, returns ``NOT_FOUND``.
-
-        Immediately upon completion of this request:
-
-        - For resource types for which a decrease in the instance's
-          allocation has been requested, billing is based on the
-          newly-requested level.
- - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The metadata field type - is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on the resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - Returns: - Callable[[~.UpdateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', - request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - Returns: - Callable[[~.DeleteInstanceRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', - request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on an instance resource. Replaces - any existing policy. 
- - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - spanner_instance_admin.InstancePartition]: - r"""Return a callable for the get instance partition method over gRPC. 
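The three IAM stubs above reuse the raw ``google.iam.v1`` protobuf types rather than proto-plus wrappers, which is why they are wired with ``SerializeToString``/``FromString``. A hedged sketch of calling them through the generated client (the resource name is a placeholder; Application Default Credentials are assumed):

    from google.cloud import spanner_admin_instance_v1
    from google.iam.v1 import iam_policy_pb2

    client = spanner_admin_instance_v1.InstanceAdminClient()
    resource = "projects/my-project/instances/my-instance"  # placeholder

    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )
    print([binding.role for binding in policy.bindings])

    # Only the permissions the caller actually holds are echoed back.
    response = client.test_iam_permissions(
        request=iam_policy_pb2.TestIamPermissionsRequest(
            resource=resource,
            permissions=["spanner.instances.update", "spanner.instances.delete"],
        )
    )
    print(list(response.permissions))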
- - Gets information about a particular instance - partition. - - Returns: - Callable[[~.GetInstancePartitionRequest], - ~.InstancePartition]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_partition' not in self._stubs: - self._stubs['get_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition', - request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize, - response_deserializer=spanner_instance_admin.InstancePartition.deserialize, - ) - return self._stubs['get_instance_partition'] - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - operations_pb2.Operation]: - r"""Return a callable for the create instance partition method over gRPC. - - Creates an instance partition and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance partition. The - instance partition name is assigned by the caller. If the named - instance partition already exists, ``CreateInstancePartition`` - returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance partition is readable via the API, with all - requested attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance partition - immediately unreadable via the API. - - The instance partition can be deleted. - - All other attempts to modify the instance partition are - rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can start using this instance partition. - - The instance partition's allocated resource levels are - readable via the API. - - The instance partition's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track creation of the instance partition. The - metadata field type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Returns: - Callable[[~.CreateInstancePartitionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
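The "functions" these comments refer to are plain callables over bytes: proto-plus request types expose ``serialize``/``deserialize`` classmethods, while raw protobuf types such as ``Operation`` use ``SerializeToString``/``FromString``. A small roundtrip sketch (the partition name is a placeholder):

    from google.longrunning import operations_pb2
    from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin

    req = spanner_instance_admin.GetInstancePartitionRequest(
        name="projects/p/instances/i/instancePartitions/part"  # placeholder
    )
    wire = spanner_instance_admin.GetInstancePartitionRequest.serialize(req)
    assert spanner_instance_admin.GetInstancePartitionRequest.deserialize(wire).name == req.name

    op = operations_pb2.Operation(name="operations/123")
    assert operations_pb2.Operation.FromString(op.SerializeToString()).name == op.name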
- if 'create_instance_partition' not in self._stubs: - self._stubs['create_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition', - request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance_partition'] - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete instance partition method over gRPC. - - Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.DeleteInstancePartitionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance_partition' not in self._stubs: - self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition', - request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance_partition'] - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - operations_pb2.Operation]: - r"""Return a callable for the update instance partition method over gRPC. - - Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track the instance partition modification. 
- The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.UpdateInstancePartitionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance_partition' not in self._stubs: - self._stubs['update_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition', - request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance_partition'] - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - spanner_instance_admin.ListInstancePartitionOperationsResponse]: - r"""Return a callable for the list instance partition - operations method over gRPC. - - Lists instance partition long-running operations in the given - instance. An instance partition operation has a name of the form - ``projects//instances//instancePartitions//operations/``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - Returns: - Callable[[~.ListInstancePartitionOperationsRequest], - ~.ListInstancePartitionOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partition_operations' not in self._stubs: - self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations', - request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize, - ) - return self._stubs['list_instance_partition_operations'] - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the move instance method over gRPC. - - Moves an instance to the target instance configuration. You can - use the returned long-running operation to track the progress of - moving the instance. 
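At the client level the partition-operations stub is wrapped in a pager that walks ``next_page_token`` automatically. A hedged sketch (the parent and filter are placeholders; the filter shape follows the request's documented metadata-type filtering):

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
        parent="projects/my-project/instances/my-instance",  # placeholder
        # Assumed filter: restrict to partition-creation operations.
        filter="(metadata.@type=type.googleapis.com/"
               "google.spanner.admin.instance.v1.CreateInstancePartitionMetadata)",
    )
    for operation in client.list_instance_partition_operations(request=request):
        print(operation.name, operation.done)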
- - ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance - meets any of the following criteria: - - - Is undergoing a move to a different instance configuration - - Has backups - - Has an ongoing update - - Contains any CMEK-enabled databases - - Is a free trial instance - - While the operation is pending: - - - All other attempts to modify the instance, including changes - to its compute capacity, are rejected. - - - The following database and backup admin operations are - rejected: - - - ``DatabaseAdmin.CreateDatabase`` - - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if - default_leader is specified in the request.) - - ``DatabaseAdmin.RestoreDatabase`` - - ``DatabaseAdmin.CreateBackup`` - - ``DatabaseAdmin.CopyBackup`` - - - Both the source and target instance configurations are subject - to hourly compute and storage charges. - - - The instance might experience higher read-write latencies and - a higher transaction abort rate. However, moving an instance - doesn't cause any downtime. - - The returned long-running operation has a name of the format - ``/operations/`` and can be used to - track the move instance operation. The metadata field type is - [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. - Cancellation is not immediate because it involves moving any - data previously moved to the target instance configuration back - to the original instance configuration. You can use this - operation to track the progress of the cancellation. Upon - successful completion of the cancellation, the operation - terminates with ``CANCELLED`` status. - - If not cancelled, upon completion of the returned operation: - - - The instance successfully moves to the target instance - configuration. - - You are billed for compute and storage in target instance - configuration. - - Authorization requires the ``spanner.instances.update`` - permission on the resource - [instance][google.spanner.admin.instance.v1.Instance]. - - For more details, see `Move an - instance `__. - - Returns: - Callable[[~.MoveInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'move_instance' not in self._stubs: - self._stubs['move_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance', - request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['move_instance'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
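The ``google.longrunning`` mixin stubs below give generic access to any operation this admin API returned, addressed by name. A hedged sketch of inspecting and cancelling one through the generated client (the operation name is a placeholder; Application Default Credentials are assumed):

    from google.cloud import spanner_admin_instance_v1
    from google.longrunning import operations_pb2

    client = spanner_admin_instance_v1.InstanceAdminClient()
    op_name = "projects/my-project/instances/my-instance/operations/op-123"  # placeholder

    op = client.get_operation(operations_pb2.GetOperationRequest(name=op_name))
    if not op.done:
        # Cancellation is best-effort; the operation may still complete.
        client.cancel_operation(operations_pb2.CancelOperationRequest(name=op_name))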
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'InstanceAdminGrpcTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py deleted file mode 100644 index 01364ea833..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,1560 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import InstanceAdminGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = 
f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class InstanceAdminGrpcAsyncIOTransport(InstanceAdminTransport): - """gRPC AsyncIO backend transport for InstanceAdmin. - - Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
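Stripped of the payload formatting, the interceptor defined at the top of this module reduces to the standard ``grpc.aio`` client-interceptor shape: await the continuation to get the call object, await the call for the response message, and return the call. A minimal sketch:

    import logging
    import grpc

    _LOGGER = logging.getLogger(__name__)

    class LoggingInterceptor(grpc.aio.UnaryUnaryClientInterceptor):
        async def intercept_unary_unary(self, continuation, client_call_details, request):
            _LOGGER.debug("Sending request for %s", client_call_details.method)
            call = await continuation(client_call_details, request)
            response = await call  # the call is awaitable; this yields the message
            _LOGGER.debug("Received %s", type(response).__name__)
            # Return the call object so callers can still read metadata/status.
            return call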
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. 
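Two pieces of the ``__init__`` wiring above are worth seeing in isolation: a ``client_cert_source`` callback yields PEM cert/key bytes that become mTLS channel credentials, and the ``-1`` channel options lift gRPC's default 4 MiB message-size caps. A hedged sketch with placeholder file paths and target:

    import grpc

    def client_cert_source() -> tuple:
        # Placeholder paths; real callbacks usually read a device cert store.
        with open("client-cert.pem", "rb") as f:
            cert = f.read()
        with open("client-key.pem", "rb") as f:
            key = f.read()
        return cert, key

    cert, key = client_cert_source()
    ssl_credentials = grpc.ssl_channel_credentials(
        certificate_chain=cert, private_key=key
    )
    channel = grpc.secure_channel(
        "spanner.googleapis.com:443",
        ssl_credentials,
        options=[
            ("grpc.max_send_message_length", -1),     # no cap on outbound size
            ("grpc.max_receive_message_length", -1),  # no cap on inbound size
        ],
    )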
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]]: - r"""Return a callable for the list instance configs method over gRPC. - - Lists the supported instance configurations for a - given project. - Returns both Google-managed configurations and - user-managed configurations. - - Returns: - Callable[[~.ListInstanceConfigsRequest], - Awaitable[~.ListInstanceConfigsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_configs' not in self._stubs: - self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', - request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, - ) - return self._stubs['list_instance_configs'] - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - Awaitable[spanner_instance_admin.InstanceConfig]]: - r"""Return a callable for the get instance config method over gRPC. - - Gets information about a particular instance - configuration. - - Returns: - Callable[[~.GetInstanceConfigRequest], - Awaitable[~.InstanceConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_config' not in self._stubs: - self._stubs['get_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', - request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, - response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, - ) - return self._stubs['get_instance_config'] - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create instance config method over gRPC. - - Creates an instance configuration and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance configuration. The - instance configuration name is assigned by the caller. If the - named instance configuration already exists, - ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. - - Immediately after the request returns: - - - The instance configuration is readable via the API, with all - requested attributes. The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. Its state is ``CREATING``. 
- - While the operation is pending: - - - Cancelling the operation renders the instance configuration - immediately unreadable via the API. - - Except for deleting the creating resource, all other attempts - to modify the instance configuration are rejected. - - Upon completion of the returned operation: - - - Instances can be created using the instance configuration. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. Its state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and - can be used to track creation of the instance configuration. The - metadata field type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.create`` - permission on the resource - [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent]. - - Returns: - Callable[[~.CreateInstanceConfigRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance_config' not in self._stubs: - self._stubs['create_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig', - request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance_config'] - - @property - def update_instance_config(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceConfigRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance config method over gRPC. - - Updates an instance configuration. The returned long-running - operation can be used to track the progress of updating the - instance. If the named instance configuration does not exist, - returns ``NOT_FOUND``. - - Only user-managed configurations can be updated. - - Immediately after the request returns: - - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. - - While the operation is pending: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. - The operation is guaranteed to succeed at undoing all changes, - after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance configuration are - rejected. - - Reading the instance configuration via the API continues to - give the pre-request values. - - Upon completion of the returned operation: - - - Creating instances using the instance configuration uses the - new values. - - The new values of the instance configuration are readable via - the API. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. - - The returned long-running operation will have a name of the - format ``/operations/`` and - can be used to track the instance configuration modification. 
- The metadata field type is - [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - Returns: - Callable[[~.UpdateInstanceConfigRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance_config' not in self._stubs: - self._stubs['update_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig', - request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance_config'] - - @property - def delete_instance_config(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceConfigRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete instance config method over gRPC. - - Deletes the instance configuration. Deletion is only allowed - when no instances are using the configuration. If any instances - are using the configuration, returns ``FAILED_PRECONDITION``. - - Only user-managed configurations can be deleted. - - Authorization requires ``spanner.instanceConfigs.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - Returns: - Callable[[~.DeleteInstanceConfigRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance_config' not in self._stubs: - self._stubs['delete_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig', - request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance_config'] - - @property - def list_instance_config_operations(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigOperationsRequest], - Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]]: - r"""Return a callable for the list instance config - operations method over gRPC. - - Lists the user-managed instance configuration long-running - operations in the given project. An instance configuration - operation has a name of the form - ``projects//instanceConfigs//operations/``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. 
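On the async client the config-operations stub is surfaced as an async pager: the method call is awaited once, then iterated with ``async for``. A hedged sketch (the parent is a placeholder; Application Default Credentials are assumed):

    import asyncio
    from google.cloud import spanner_admin_instance_v1

    async def main() -> None:
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
        pager = await client.list_instance_config_operations(
            request=spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
                parent="projects/my-project"  # placeholder
            )
        )
        async for operation in pager:
            print(operation.name, operation.done)

    asyncio.run(main())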
- - Returns: - Callable[[~.ListInstanceConfigOperationsRequest], - Awaitable[~.ListInstanceConfigOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_config_operations' not in self._stubs: - self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', - request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, - ) - return self._stubs['list_instance_config_operations'] - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - Awaitable[spanner_instance_admin.ListInstancesResponse]]: - r"""Return a callable for the list instances method over gRPC. - - Lists all instances in the given project. - - Returns: - Callable[[~.ListInstancesRequest], - Awaitable[~.ListInstancesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', - request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]]: - r"""Return a callable for the list instance partitions method over gRPC. - - Lists all instance partitions for the given instance. - - Returns: - Callable[[~.ListInstancePartitionsRequest], - Awaitable[~.ListInstancePartitionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partitions' not in self._stubs: - self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', - request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, - ) - return self._stubs['list_instance_partitions'] - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - Awaitable[spanner_instance_admin.Instance]]: - r"""Return a callable for the get instance method over gRPC. - - Gets information about a particular instance. - - Returns: - Callable[[~.GetInstanceRequest], - Awaitable[~.Instance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
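The ``Awaitable[...]`` return types above mean each property hands back a callable whose invocation starts the RPC and whose result is awaited for the deserialized response. A hedged sketch of driving a stub directly through the transport (the parent is a placeholder; this assumes the transport's default channel carries Application Default Credentials):

    import asyncio
    from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
    from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports import (
        InstanceAdminGrpcAsyncIOTransport,
    )

    async def main() -> None:
        transport = InstanceAdminGrpcAsyncIOTransport()
        request = spanner_instance_admin.ListInstancesRequest(parent="projects/my-project")
        # Calling the property's stub starts the RPC; awaiting it yields
        # the deserialized ListInstancesResponse.
        response = await transport.list_instances(request)
        for instance in response.instances:
            print(instance.name)

    asyncio.run(main())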
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance', - request_serializer=spanner_instance_admin.GetInstanceRequest.serialize, - response_deserializer=spanner_instance_admin.Instance.deserialize, - ) - return self._stubs['get_instance'] - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create instance method over gRPC. - - Creates an instance and begins preparing it to begin serving. - The returned long-running operation can be used to track the - progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track creation of the instance. The metadata field type - is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Returns: - Callable[[~.CreateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance', - request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance'] - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance method over gRPC. - - Updates an instance, and begins allocating or releasing - resources as requested. The returned long-running operation can - be used to track the progress of updating the instance. If the - named instance does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level. 
- - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The metadata field type - is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on the resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - Returns: - Callable[[~.UpdateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', - request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - Returns: - Callable[[~.DeleteInstanceRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', - request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on an instance resource. 
Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
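- # Note: these IAM stubs pass raw protobuf classes, so the hooks are - # SerializeToString/FromString; the proto-plus spanner_instance_admin - # messages used elsewhere expose serialize/deserialize classmethods instead.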
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - Awaitable[spanner_instance_admin.InstancePartition]]: - r"""Return a callable for the get instance partition method over gRPC. - - Gets information about a particular instance - partition. - - Returns: - Callable[[~.GetInstancePartitionRequest], - Awaitable[~.InstancePartition]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_partition' not in self._stubs: - self._stubs['get_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition', - request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize, - response_deserializer=spanner_instance_admin.InstancePartition.deserialize, - ) - return self._stubs['get_instance_partition'] - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create instance partition method over gRPC. - - Creates an instance partition and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance partition. The - instance partition name is assigned by the caller. If the named - instance partition already exists, ``CreateInstancePartition`` - returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance partition is readable via the API, with all - requested attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance partition - immediately unreadable via the API. - - The instance partition can be deleted. - - All other attempts to modify the instance partition are - rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can start using this instance partition. - - The instance partition's allocated resource levels are - readable via the API. - - The instance partition's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``<instance_partition_name>/operations/<operation_id>`` - and can be used to track creation of the instance partition. The - metadata field type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Returns: - Callable[[~.CreateInstancePartitionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request.
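- # For illustration only (not part of the generated surface): a caller - # would normally reach this stub through the async client and await the - # returned long-running operation, e.g. - # - # operation = await client.create_instance_partition(request=request) - # partition = await operation.result() # resolves once state is READY - # - # where `client` and `request` are assumed to be configured elsewhere.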
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance_partition' not in self._stubs: - self._stubs['create_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition', - request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance_partition'] - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete instance partition method over gRPC. - - Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.DeleteInstancePartitionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance_partition' not in self._stubs: - self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition', - request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance_partition'] - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance partition method over gRPC. - - Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. 
- - The returned long-running operation will have a name of the - format ``<instance_partition_name>/operations/<operation_id>`` - and can be used to track the instance partition modification. - The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.UpdateInstancePartitionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance_partition' not in self._stubs: - self._stubs['update_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition', - request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance_partition'] - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]]: - r"""Return a callable for the list instance partition - operations method over gRPC. - - Lists instance partition long-running operations in the given - instance. An instance partition operation has a name of the form - ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - Returns: - Callable[[~.ListInstancePartitionOperationsRequest], - Awaitable[~.ListInstancePartitionOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partition_operations' not in self._stubs: - self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations', - request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize, - ) - return self._stubs['list_instance_partition_operations'] - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the move instance method over gRPC.
- - Moves an instance to the target instance configuration. You can - use the returned long-running operation to track the progress of - moving the instance. - - ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance - meets any of the following criteria: - - - Is undergoing a move to a different instance configuration - - Has backups - - Has an ongoing update - - Contains any CMEK-enabled databases - - Is a free trial instance - - While the operation is pending: - - - All other attempts to modify the instance, including changes - to its compute capacity, are rejected. - - - The following database and backup admin operations are - rejected: - - - ``DatabaseAdmin.CreateDatabase`` - - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if - default_leader is specified in the request.) - - ``DatabaseAdmin.RestoreDatabase`` - - ``DatabaseAdmin.CreateBackup`` - - ``DatabaseAdmin.CopyBackup`` - - - Both the source and target instance configurations are subject - to hourly compute and storage charges. - - - The instance might experience higher read-write latencies and - a higher transaction abort rate. However, moving an instance - doesn't cause any downtime. - - The returned long-running operation has a name of the format - ``<instance_name>/operations/<operation_id>`` and can be used to - track the move instance operation. The metadata field type is - [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. - Cancellation is not immediate because it involves moving any - data previously moved to the target instance configuration back - to the original instance configuration. You can use this - operation to track the progress of the cancellation. Upon - successful completion of the cancellation, the operation - terminates with ``CANCELLED`` status. - - If not cancelled, upon completion of the returned operation: - - - The instance successfully moves to the target instance - configuration. - - You are billed for compute and storage in target instance - configuration. - - Authorization requires the ``spanner.instances.update`` - permission on the resource - [instance][google.spanner.admin.instance.v1.Instance]. - - For more details, see `Move an - instance <https://cloud.google.com/spanner/docs/move-instance>`__. - - Returns: - Callable[[~.MoveInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
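- # The method path below follows the fully-qualified proto route - # '/<package>.<Service>/<Method>'; gRPC matches it byte-for-byte, so it - # must agree exactly with the InstanceAdmin service definition.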
- if 'move_instance' not in self._stubs: - self._stubs['move_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance', - request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['move_instance'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_instance_configs: self._wrap_method( - self.list_instance_configs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_instance_config: self._wrap_method( - self.get_instance_config, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_instance_config: self._wrap_method( - self.create_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_config: self._wrap_method( - self.update_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_config: self._wrap_method( - self.delete_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_config_operations: self._wrap_method( - self.list_instance_config_operations, - default_timeout=None, - client_info=client_info, - ), - self.list_instances: self._wrap_method( - self.list_instances, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_instance_partitions: self._wrap_method( - self.list_instance_partitions, - default_timeout=None, - client_info=client_info, - ), - self.get_instance: self._wrap_method( - self.get_instance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_instance: self._wrap_method( - self.create_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.update_instance: self._wrap_method( - self.update_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_instance: self._wrap_method( - self.delete_instance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.get_instance_partition: self._wrap_method( - self.get_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.create_instance_partition: self._wrap_method( - self.create_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_partition: self._wrap_method( - self.delete_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_partition: self._wrap_method( - self.update_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_partition_operations: self._wrap_method( - self.list_instance_partition_operations, - default_timeout=None, - client_info=client_info, - ), - self.move_instance: self._wrap_method( - self.move_instance, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
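- # A note on the retry policy wired up in _prep_wrapped_messages above: - # AsyncRetry(initial=1.0, multiplier=1.3, maximum=32.0, deadline=3600.0) - # retries only DeadlineExceeded/ServiceUnavailable, sleeping roughly - # - # delay = 1.0 - # while not done and elapsed < 3600.0: - # sleep(min(delay, 32.0)) # google-api-core applies jitter - # delay *= 1.3 - # - # i.e. nominal back-offs of 1.0, 1.3, 1.69, ... seconds, capped at 32s.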
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'InstanceAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py deleted file mode 100644 index 1d9124d35d..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py +++ /dev/null @@ -1,4462 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseInstanceAdminRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class InstanceAdminRestInterceptor: - """Interceptor for InstanceAdmin. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the InstanceAdminRestTransport. - - .. 
code-block:: python - class MyCustomInstanceAdminInterceptor(InstanceAdminRestInterceptor): - def pre_create_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance_partition(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance_partition(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_config_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_config_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_configs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_configs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_partition_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_partition_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_partitions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_partitions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instances(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instances(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_move_instance(self, request, metadata): 
- logging.log(f"Received request: {request}") - return request, metadata - - def post_move_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_set_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_set_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_test_iam_permissions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_test_iam_permissions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance_partition(self, response): - logging.log(f"Received response: {response}") - return response - - transport = InstanceAdminRestTransport(interceptor=MyCustomInstanceAdminInterceptor()) - client = InstanceAdminClient(transport=transport) - - - """ - def pre_create_instance(self, request: spanner_instance_admin.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance - - DEPRECATED. Please use the `post_create_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_create_instance` interceptor runs - before the `post_create_instance_with_metadata` interceptor. - """ - return response - - def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_create_instance_with_metadata` - interceptor in new development instead of the `post_create_instance` interceptor. - When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the - `post_create_instance` interceptor. The (possibly modified) response returned by - `post_create_instance` will be passed to - `post_create_instance_with_metadata`. 
- """ - return response, metadata - - def pre_create_instance_config(self, request: spanner_instance_admin.CreateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_create_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance_config - - DEPRECATED. Please use the `post_create_instance_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_create_instance_config` interceptor runs - before the `post_create_instance_config_with_metadata` interceptor. - """ - return response - - def post_create_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_instance_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_create_instance_config_with_metadata` - interceptor in new development instead of the `post_create_instance_config` interceptor. - When both interceptors are used, this `post_create_instance_config_with_metadata` interceptor runs after the - `post_create_instance_config` interceptor. The (possibly modified) response returned by - `post_create_instance_config` will be passed to - `post_create_instance_config_with_metadata`. - """ - return response, metadata - - def pre_create_instance_partition(self, request: spanner_instance_admin.CreateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_create_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance_partition - - DEPRECATED. Please use the `post_create_instance_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_create_instance_partition` interceptor runs - before the `post_create_instance_partition_with_metadata` interceptor. - """ - return response - - def post_create_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_instance_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. 
- - We recommend only using this `post_create_instance_partition_with_metadata` - interceptor in new development instead of the `post_create_instance_partition` interceptor. - When both interceptors are used, this `post_create_instance_partition_with_metadata` interceptor runs after the - `post_create_instance_partition` interceptor. The (possibly modified) response returned by - `post_create_instance_partition` will be passed to - `post_create_instance_partition_with_metadata`. - """ - return response, metadata - - def pre_delete_instance(self, request: spanner_instance_admin.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def pre_delete_instance_config(self, request: spanner_instance_admin.DeleteInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def pre_delete_instance_partition(self, request: spanner_instance_admin.DeleteInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - DEPRECATED. Please use the `post_get_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_iam_policy` interceptor runs - before the `post_get_iam_policy_with_metadata` interceptor. - """ - return response - - def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_get_iam_policy_with_metadata` - interceptor in new development instead of the `post_get_iam_policy` interceptor. - When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the - `post_get_iam_policy` interceptor. 
The (possibly modified) response returned by - `post_get_iam_policy` will be passed to - `post_get_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_get_instance(self, request: spanner_instance_admin.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_instance(self, response: spanner_instance_admin.Instance) -> spanner_instance_admin.Instance: - """Post-rpc interceptor for get_instance - - DEPRECATED. Please use the `post_get_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_instance` interceptor runs - before the `post_get_instance_with_metadata` interceptor. - """ - return response - - def post_get_instance_with_metadata(self, response: spanner_instance_admin.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_get_instance_with_metadata` - interceptor in new development instead of the `post_get_instance` interceptor. - When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the - `post_get_instance` interceptor. The (possibly modified) response returned by - `post_get_instance` will be passed to - `post_get_instance_with_metadata`. - """ - return response, metadata - - def pre_get_instance_config(self, request: spanner_instance_admin.GetInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_instance_config(self, response: spanner_instance_admin.InstanceConfig) -> spanner_instance_admin.InstanceConfig: - """Post-rpc interceptor for get_instance_config - - DEPRECATED. Please use the `post_get_instance_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_instance_config` interceptor runs - before the `post_get_instance_config_with_metadata` interceptor. - """ - return response - - def post_get_instance_config_with_metadata(self, response: spanner_instance_admin.InstanceConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstanceConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_instance_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. 
- - We recommend only using this `post_get_instance_config_with_metadata` - interceptor in new development instead of the `post_get_instance_config` interceptor. - When both interceptors are used, this `post_get_instance_config_with_metadata` interceptor runs after the - `post_get_instance_config` interceptor. The (possibly modified) response returned by - `post_get_instance_config` will be passed to - `post_get_instance_config_with_metadata`. - """ - return response, metadata - - def pre_get_instance_partition(self, request: spanner_instance_admin.GetInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_instance_partition(self, response: spanner_instance_admin.InstancePartition) -> spanner_instance_admin.InstancePartition: - """Post-rpc interceptor for get_instance_partition - - DEPRECATED. Please use the `post_get_instance_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_instance_partition` interceptor runs - before the `post_get_instance_partition_with_metadata` interceptor. - """ - return response - - def post_get_instance_partition_with_metadata(self, response: spanner_instance_admin.InstancePartition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstancePartition, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_instance_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_get_instance_partition_with_metadata` - interceptor in new development instead of the `post_get_instance_partition` interceptor. - When both interceptors are used, this `post_get_instance_partition_with_metadata` interceptor runs after the - `post_get_instance_partition` interceptor. The (possibly modified) response returned by - `post_get_instance_partition` will be passed to - `post_get_instance_partition_with_metadata`. - """ - return response, metadata - - def pre_list_instance_config_operations(self, request: spanner_instance_admin.ListInstanceConfigOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_config_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_config_operations(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: - """Post-rpc interceptor for list_instance_config_operations - - DEPRECATED. Please use the `post_list_instance_config_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. 
This `post_list_instance_config_operations` interceptor runs - before the `post_list_instance_config_operations_with_metadata` interceptor. - """ - return response - - def post_list_instance_config_operations_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_config_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_config_operations_with_metadata` - interceptor in new development instead of the `post_list_instance_config_operations` interceptor. - When both interceptors are used, this `post_list_instance_config_operations_with_metadata` interceptor runs after the - `post_list_instance_config_operations` interceptor. The (possibly modified) response returned by - `post_list_instance_config_operations` will be passed to - `post_list_instance_config_operations_with_metadata`. - """ - return response, metadata - - def pre_list_instance_configs(self, request: spanner_instance_admin.ListInstanceConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_configs - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_configs(self, response: spanner_instance_admin.ListInstanceConfigsResponse) -> spanner_instance_admin.ListInstanceConfigsResponse: - """Post-rpc interceptor for list_instance_configs - - DEPRECATED. Please use the `post_list_instance_configs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instance_configs` interceptor runs - before the `post_list_instance_configs_with_metadata` interceptor. - """ - return response - - def post_list_instance_configs_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_configs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_configs_with_metadata` - interceptor in new development instead of the `post_list_instance_configs` interceptor. - When both interceptors are used, this `post_list_instance_configs_with_metadata` interceptor runs after the - `post_list_instance_configs` interceptor. The (possibly modified) response returned by - `post_list_instance_configs` will be passed to - `post_list_instance_configs_with_metadata`. 
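- - Request-side hooks compose the same way; for example (an illustrative - sketch, the header shown is hypothetical): - - .. code-block:: python - - class HeaderInterceptor(InstanceAdminRestInterceptor): - def pre_list_instance_configs(self, request, metadata): - # metadata is a sequence of (key, value) pairs. - return request, list(metadata) + [("x-example", "demo")]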
- """ - return response, metadata - - def pre_list_instance_partition_operations(self, request: spanner_instance_admin.ListInstancePartitionOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_partition_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_partition_operations(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: - """Post-rpc interceptor for list_instance_partition_operations - - DEPRECATED. Please use the `post_list_instance_partition_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instance_partition_operations` interceptor runs - before the `post_list_instance_partition_operations_with_metadata` interceptor. - """ - return response - - def post_list_instance_partition_operations_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_partition_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_partition_operations_with_metadata` - interceptor in new development instead of the `post_list_instance_partition_operations` interceptor. - When both interceptors are used, this `post_list_instance_partition_operations_with_metadata` interceptor runs after the - `post_list_instance_partition_operations` interceptor. The (possibly modified) response returned by - `post_list_instance_partition_operations` will be passed to - `post_list_instance_partition_operations_with_metadata`. - """ - return response, metadata - - def pre_list_instance_partitions(self, request: spanner_instance_admin.ListInstancePartitionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_partitions - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_partitions(self, response: spanner_instance_admin.ListInstancePartitionsResponse) -> spanner_instance_admin.ListInstancePartitionsResponse: - """Post-rpc interceptor for list_instance_partitions - - DEPRECATED. Please use the `post_list_instance_partitions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instance_partitions` interceptor runs - before the `post_list_instance_partitions_with_metadata` interceptor. 
- """ - return response - - def post_list_instance_partitions_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_partitions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_partitions_with_metadata` - interceptor in new development instead of the `post_list_instance_partitions` interceptor. - When both interceptors are used, this `post_list_instance_partitions_with_metadata` interceptor runs after the - `post_list_instance_partitions` interceptor. The (possibly modified) response returned by - `post_list_instance_partitions` will be passed to - `post_list_instance_partitions_with_metadata`. - """ - return response, metadata - - def pre_list_instances(self, request: spanner_instance_admin.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instances - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instances(self, response: spanner_instance_admin.ListInstancesResponse) -> spanner_instance_admin.ListInstancesResponse: - """Post-rpc interceptor for list_instances - - DEPRECATED. Please use the `post_list_instances_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instances` interceptor runs - before the `post_list_instances_with_metadata` interceptor. - """ - return response - - def post_list_instances_with_metadata(self, response: spanner_instance_admin.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instances - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instances_with_metadata` - interceptor in new development instead of the `post_list_instances` interceptor. - When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the - `post_list_instances` interceptor. The (possibly modified) response returned by - `post_list_instances` will be passed to - `post_list_instances_with_metadata`. - """ - return response, metadata - - def pre_move_instance(self, request: spanner_instance_admin.MoveInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.MoveInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for move_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_move_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for move_instance - - DEPRECATED. 
Please use the `post_move_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_move_instance` interceptor runs - before the `post_move_instance_with_metadata` interceptor. - """ - return response - - def post_move_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for move_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_move_instance_with_metadata` - interceptor in new development instead of the `post_move_instance` interceptor. - When both interceptors are used, this `post_move_instance_with_metadata` interceptor runs after the - `post_move_instance` interceptor. The (possibly modified) response returned by - `post_move_instance` will be passed to - `post_move_instance_with_metadata`. - """ - return response, metadata - - def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - DEPRECATED. Please use the `post_set_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_set_iam_policy` interceptor runs - before the `post_set_iam_policy_with_metadata` interceptor. - """ - return response - - def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_set_iam_policy_with_metadata` - interceptor in new development instead of the `post_set_iam_policy` interceptor. - When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the - `post_set_iam_policy` interceptor. The (possibly modified) response returned by - `post_set_iam_policy` will be passed to - `post_set_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. 
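A pre-rpc hook can also rewrite the request itself, which makes it a convenient place for client-side normalization. A minimal sketch for the hook just described (same base class as the earlier sketch)::

    from google.iam.v1 import iam_policy_pb2

    class PermissionGuard(InstanceAdminRestInterceptor):
        def pre_test_iam_permissions(self, request, metadata):
            # Drop empty permission strings before the request goes out.
            request = iam_policy_pb2.TestIamPermissionsRequest(
                resource=request.resource,
                permissions=[p for p in request.permissions if p],
            )
            return request, metadata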
- """ - return request, metadata - - def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_test_iam_permissions` interceptor runs - before the `post_test_iam_permissions_with_metadata` interceptor. - """ - return response - - def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_test_iam_permissions_with_metadata` - interceptor in new development instead of the `post_test_iam_permissions` interceptor. - When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the - `post_test_iam_permissions` interceptor. The (possibly modified) response returned by - `post_test_iam_permissions` will be passed to - `post_test_iam_permissions_with_metadata`. - """ - return response, metadata - - def pre_update_instance(self, request: spanner_instance_admin.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance - - DEPRECATED. Please use the `post_update_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_update_instance` interceptor runs - before the `post_update_instance_with_metadata` interceptor. - """ - return response - - def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_update_instance_with_metadata` - interceptor in new development instead of the `post_update_instance` interceptor. - When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the - `post_update_instance` interceptor. The (possibly modified) response returned by - `post_update_instance` will be passed to - `post_update_instance_with_metadata`. 
- """ - return response, metadata - - def pre_update_instance_config(self, request: spanner_instance_admin.UpdateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_update_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance_config - - DEPRECATED. Please use the `post_update_instance_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_update_instance_config` interceptor runs - before the `post_update_instance_config_with_metadata` interceptor. - """ - return response - - def post_update_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_update_instance_config_with_metadata` - interceptor in new development instead of the `post_update_instance_config` interceptor. - When both interceptors are used, this `post_update_instance_config_with_metadata` interceptor runs after the - `post_update_instance_config` interceptor. The (possibly modified) response returned by - `post_update_instance_config` will be passed to - `post_update_instance_config_with_metadata`. - """ - return response, metadata - - def pre_update_instance_partition(self, request: spanner_instance_admin.UpdateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_update_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance_partition - - DEPRECATED. Please use the `post_update_instance_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_update_instance_partition` interceptor runs - before the `post_update_instance_partition_with_metadata` interceptor. - """ - return response - - def post_update_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. 
- - We recommend only using this `post_update_instance_partition_with_metadata` - interceptor in new development instead of the `post_update_instance_partition` interceptor. - When both interceptors are used, this `post_update_instance_partition_with_metadata` interceptor runs after the - `post_update_instance_partition` interceptor. The (possibly modified) response returned by - `post_update_instance_partition` will be passed to - `post_update_instance_partition_with_metadata`. - """ - return response, metadata - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. 
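The operations-mixin hooks above differ from the RPC hooks in that they have no *_with_metadata variant; the post hook receives only the response. A sketch::

    class OperationWatcher(InstanceAdminRestInterceptor):
        def post_list_operations(self, response):
            # response is an operations_pb2.ListOperationsResponse.
            for op in response.operations:
                if op.done:
                    print(f"{op.name} finished")
            return response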
- """ - return response - - -@dataclasses.dataclass -class InstanceAdminRestStub: - _session: AuthorizedSession - _host: str - _interceptor: InstanceAdminRestInterceptor - - -class InstanceAdminRestTransport(_BaseInstanceAdminRestTransport): - """REST backend synchronous transport for InstanceAdmin. - - Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[InstanceAdminRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or InstanceAdminRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ], - 
'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateInstance(_BaseInstanceAdminRestTransport._BaseCreateInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CreateInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create instance method over HTTP. - - Args: - request (~.spanner_instance_admin.CreateInstanceRequest): - The request object. The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
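The property above builds the REST operations client once, over the same credentials and the URI mappings listed, and then serves it from cache. One way to use it for manual polling (resource IDs are hypothetical; the name format follows the URI patterns above)::

    op = transport.operations_client.get_operation(
        name="projects/my-project/instances/my-instance/operations/my-op"
    )
    print("done" if op.done else "still running")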
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_http_options() - - request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateInstanceConfig(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CreateInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.CreateInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.CreateInstanceConfigRequest): - The request object. The request for - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_create_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CreateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
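Because failures are raised via core_exceptions.from_http_response, callers see the google.api_core.exceptions.GoogleAPICallError subclass matching the HTTP status (PermissionDenied for 403, and so on). A sketch using the client from the earlier transport example (project ID hypothetical)::

    from google.api_core import exceptions as core_exceptions
    from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin

    request = spanner_instance_admin.CreateInstanceConfigRequest(
        parent="projects/my-project",
        instance_config_id="custom-config",
    )
    try:
        operation = client.create_instance_config(request=request)
    except core_exceptions.GoogleAPICallError as exc:
        print(f"call failed with HTTP {exc.code}: {exc.message}")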
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_instance_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_config", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstanceConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateInstancePartition(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CreateInstancePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.CreateInstancePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create instance partition method over HTTP. - - Args: - request (~.spanner_instance_admin.CreateInstancePartitionRequest): - The request object. The request for - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
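The metadata contract repeated in these signatures is: values are str, except for keys ending in -bin, which must carry raw bytes. For example (both keys are illustrative)::

    metadata = (
        ("x-goog-request-params", "parent=projects/my-project"),  # str value
        ("x-example-trace-bin", b"\x00\x01"),  # bytes value, key ends in -bin
    )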
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_create_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CreateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_instance_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_partition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstancePartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteInstance(_BaseInstanceAdminRestTransport._BaseDeleteInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.DeleteInstanceRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete instance method over HTTP. - - Args: - request (~.spanner_instance_admin.DeleteInstanceRequest): - The request object. The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_http_options() - - request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteInstanceConfig(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.DeleteInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.DeleteInstanceConfigRequest): - The request object. The request for - [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_delete_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteInstancePartition(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteInstancePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.DeleteInstancePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete instance partition method over HTTP. - - Args: - request (~.spanner_instance_admin.DeleteInstancePartitionRequest): - The request object. The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_delete_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetIamPolicy(_BaseInstanceAdminRestTransport._BaseGetIamPolicy, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. 
A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the `IAM - documentation `__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation `__. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetIamPolicy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
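The JSON and YAML policy shown in the docstring correspond to roughly the following protobuf construction (condition omitted for brevity)::

    import base64

    from google.iam.v1 import policy_pb2

    policy = policy_pb2.Policy(
        version=3,
        # etag is a bytes field; the JSON form carries its base64 encoding.
        etag=base64.b64decode("BwWWja0YfJA="),
        bindings=[
            policy_pb2.Binding(
                role="roles/resourcemanager.organizationAdmin",
                members=["user:mike@example.com", "group:admins@example.com"],
            ),
        ],
    )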
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_iam_policy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetInstance(_BaseInstanceAdminRestTransport._BaseGetInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.Instance: - r"""Call the get instance method over HTTP. - - Args: - request (~.spanner_instance_admin.GetInstanceRequest): - The request object. The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.Instance: - An isolated set of Cloud Spanner - resources on which databases can be - hosted. 
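At the client level this maps to a single call; the resource name follows the URI pattern this transport uses (IDs hypothetical)::

    from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin

    instance = client.get_instance(
        request=spanner_instance_admin.GetInstanceRequest(
            name="projects/my-project/instances/my-instance"
        )
    )
    print(instance.name)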
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetInstance._get_http_options() - - request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.Instance() - pb_resp = spanner_instance_admin.Instance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.Instance.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetInstanceConfig(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.GetInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.InstanceConfig: - 
r"""Call the get instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.GetInstanceConfigRequest): - The request object. The request for - [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.InstanceConfig: - A possible configuration for a Cloud - Spanner instance. Configurations define - the geographic placement of nodes and - their replication. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_get_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = spanner_instance_admin.InstanceConfig()
-            pb_resp = spanner_instance_admin.InstanceConfig.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_get_instance_config(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_get_instance_config_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = spanner_instance_admin.InstanceConfig.to_json(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_config",
-                    extra={
-                        "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                        "rpcName": "GetInstanceConfig",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _GetInstancePartition(_BaseInstanceAdminRestTransport._BaseGetInstancePartition, InstanceAdminRestStub):
-        def __hash__(self):
-            return hash("InstanceAdminRestTransport.GetInstancePartition")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: spanner_instance_admin.GetInstancePartitionRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> spanner_instance_admin.InstancePartition:
-            r"""Call the get instance partition method over HTTP.
-
-            Args:
-                request (~.spanner_instance_admin.GetInstancePartitionRequest):
-                    The request object. The request for
-                    [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition].
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.spanner_instance_admin.InstancePartition:
-                    An isolated set of Cloud Spanner
-                    resources that databases can define
-                    placements on.
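-
-            A minimal end-to-end sketch through the public client, which is
-            what ultimately invokes this transport method (identifiers are
-            placeholders)::
-
-                from google.cloud import spanner_admin_instance_v1
-
-                client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")
-                partition = client.get_instance_partition(
-                    name="projects/p/instances/i/instancePartitions/part",
-                )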
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_get_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.InstancePartition() - pb_resp = spanner_instance_admin.InstancePartition.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_instance_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.InstancePartition.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_partition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstancePartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstanceConfigOperations(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstanceConfigOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstanceConfigOperationsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: - r"""Call the list instance config - operations method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstanceConfigOperationsRequest): - The request object. The request for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstanceConfigOperationsResponse: - The response for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_config_operations(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstanceConfigOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
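-            # NOTE: from_http_response also parses the JSON error body, so the
-            # raised exception carries the server-supplied message and details,
-            # not just the bare HTTP status code.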
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstanceConfigOperationsResponse() - pb_resp = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_config_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_config_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_config_operations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstanceConfigs(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstanceConfigs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstanceConfigsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstanceConfigsResponse: - r"""Call the list instance configs method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstanceConfigsRequest): - The request object. The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstanceConfigsResponse: - The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. 
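-
-            A minimal pagination sketch through the public client (the project
-            ID is a placeholder); the client-level pager follows
-            ``next_page_token``, while this transport method returns exactly
-            one page::
-
-                from google.cloud import spanner_admin_instance_v1
-
-                client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")
-                for config in client.list_instance_configs(parent="projects/p"):
-                    print(config.name)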
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_configs(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigs", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstanceConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstanceConfigsResponse() - pb_resp = spanner_instance_admin.ListInstanceConfigsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_configs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_configs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstanceConfigsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_configs", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstancePartitionOperations(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstancePartitionOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: 
spanner_instance_admin.ListInstancePartitionOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: - r"""Call the list instance partition - operations method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstancePartitionOperationsRequest): - The request object. The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstancePartitionOperationsResponse: - The response for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_partition_operations(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitionOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitionOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstancePartitionOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
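-            # NOTE: this transport-level call returns a single response page; the
-            # GAPIC client layer wraps it in a pager that follows next_page_token,
-            # so most callers never invoke this stub directly.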
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstancePartitionOperationsResponse() - pb_resp = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_partition_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_partition_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partition_operations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitionOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstancePartitions(_BaseInstanceAdminRestTransport._BaseListInstancePartitions, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstancePartitions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstancePartitionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstancePartitionsResponse: - r"""Call the list instance partitions method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstancePartitionsRequest): - The request object. The request for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstancePartitionsResponse: - The response for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. 
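-
-            A sketch using an explicit request object to control the page size
-            (identifiers are placeholders)::
-
-                from google.cloud import spanner_admin_instance_v1
-
-                client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")
-                request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
-                    parent="projects/p/instances/i",
-                    page_size=50,
-                )
-                for partition in client.list_instance_partitions(request=request):
-                    print(partition.name)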
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_partitions(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstancePartitions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstancePartitionsResponse() - pb_resp = spanner_instance_admin.ListInstancePartitionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_partitions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_partitions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstancePartitionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partitions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstances(_BaseInstanceAdminRestTransport._BaseListInstances, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstances") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstancesRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstancesResponse: - r"""Call the list instances method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstancesRequest): - The request object. The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstancesResponse: - The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstances._get_http_options() - - request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstances._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstances._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstances", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstances", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
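-            # NOTE: the `retry` argument above is accepted for interface parity
-            # but is not referenced in this method body; retry policy is applied
-            # one layer up, by the gapic_v1.method wrappers on the client.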
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = spanner_instance_admin.ListInstancesResponse()
-            pb_resp = spanner_instance_admin.ListInstancesResponse.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_list_instances(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = spanner_instance_admin.ListInstancesResponse.to_json(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instances",
-                    extra={
-                        "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                        "rpcName": "ListInstances",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _MoveInstance(_BaseInstanceAdminRestTransport._BaseMoveInstance, InstanceAdminRestStub):
-        def __hash__(self):
-            return hash("InstanceAdminRestTransport.MoveInstance")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: spanner_instance_admin.MoveInstanceRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the move instance method over HTTP.
-
-            Args:
-                request (~.spanner_instance_admin.MoveInstanceRequest):
-                    The request object. The request for
-                    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.operations_pb2.Operation:
-                    This resource represents a
-                    long-running operation that is the
-                    result of a network API call.
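-
-            A long-running-operation sketch through the public client
-            (resource names are placeholders)::
-
-                from google.cloud import spanner_admin_instance_v1
-
-                client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")
-                op = client.move_instance(
-                    request=spanner_admin_instance_v1.MoveInstanceRequest(
-                        name="projects/p/instances/i",
-                        target_config="projects/p/instanceConfigs/nam3",
-                    )
-                )
-                op.result()  # blocks until the move completes or fails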
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_http_options() - - request, metadata = self._interceptor.pre_move_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.MoveInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "MoveInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._MoveInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_move_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_move_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.move_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "MoveInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SetIamPolicy(_BaseInstanceAdminRestTransport._BaseSetIamPolicy, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the `IAM - documentation `__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation `__. 
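-
-            A read-modify-write sketch through the public client (resource and
-            member are placeholders); reusing the read policy's etag guards
-            against concurrent updates::
-
-                from google.cloud import spanner_admin_instance_v1
-                from google.iam.v1 import iam_policy_pb2
-
-                client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")
-                resource = "projects/p/instances/i"
-                policy = client.get_iam_policy(
-                    request=iam_policy_pb2.GetIamPolicyRequest(resource=resource))
-                policy.bindings.add(
-                    role="roles/spanner.databaseReader",
-                    members=["user:alice@example.com"])
-                policy = client.set_iam_policy(
-                    request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy))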
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.SetIamPolicy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_set_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.set_iam_policy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TestIamPermissions(_BaseInstanceAdminRestTransport._BaseTestIamPermissions, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. - - Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.TestIamPermissions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
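-            # NOTE: the interceptor hooks bracket this block: pre_test_iam_permissions
-            # ran before the request was transcoded above, and post_test_iam_permissions
-            # (plus its *_with_metadata variant, which also receives the response
-            # headers) runs after the response is parsed below.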
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_test_iam_permissions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.test_iam_permissions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInstance(_BaseInstanceAdminRestTransport._BaseUpdateInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.UpdateInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update instance method over HTTP. - - Args: - request (~.spanner_instance_admin.UpdateInstanceRequest): - The request object. The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
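-
-            A field-mask sketch through the public client (values are
-            placeholders); only the masked fields are modified::
-
-                from google.cloud import spanner_admin_instance_v1
-                from google.protobuf import field_mask_pb2
-
-                client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")
-                op = client.update_instance(
-                    instance=spanner_admin_instance_v1.Instance(
-                        name="projects/p/instances/i",
-                        node_count=3,
-                    ),
-                    field_mask=field_mask_pb2.FieldMask(paths=["node_count"]),
-                )
-                op.result()  # waits for the long-running update to finish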
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_http_options() - - request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInstanceConfig(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.UpdateInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.UpdateInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.UpdateInstanceConfigRequest): - The request object. The request for - [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_update_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._UpdateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
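-            # NOTE: this stub returns the raw operations_pb2.Operation; the client
-            # layer wraps it in a google.api_core.operation.Operation future so
-            # callers can poll with result() or cancel the operation.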
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_instance_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_config", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstanceConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInstancePartition(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.UpdateInstancePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.UpdateInstancePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update instance partition method over HTTP. - - Args: - request (~.spanner_instance_admin.UpdateInstancePartitionRequest): - The request object. The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
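-
-            Calls to this method can be observed with a custom interceptor; a
-            minimal sketch (the subclass name is illustrative)::
-
-                from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
-                from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports import (
-                    InstanceAdminRestInterceptor,
-                    InstanceAdminRestTransport,
-                )
-
-                class AuditInterceptor(InstanceAdminRestInterceptor):
-                    def pre_update_instance_partition(self, request, metadata):
-                        print("updating", request.instance_partition.name)
-                        return request, metadata
-
-                transport = InstanceAdminRestTransport(interceptor=AuditInterceptor())
-                client = InstanceAdminClient(transport=transport)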
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_update_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._UpdateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_instance_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_partition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstancePartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance_config(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceConfigRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - spanner_instance_admin.Instance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - spanner_instance_admin.InstanceConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - spanner_instance_admin.InstancePartition]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_config_operations(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigOperationsRequest], - spanner_instance_admin.ListInstanceConfigOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListInstanceConfigOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - spanner_instance_admin.ListInstanceConfigsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - spanner_instance_admin.ListInstancePartitionOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstancePartitionOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - spanner_instance_admin.ListInstancePartitionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstancePartitions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - spanner_instance_admin.ListInstancesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._MoveInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance_config(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceConfigRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseInstanceAdminRestTransport._BaseCancelOperation, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CancelOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseInstanceAdminRestTransport._BaseDeleteOperation, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseInstanceAdminRestTransport._BaseGetOperation, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminAsyncClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseInstanceAdminRestTransport._BaseListOperations, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. 
- - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminAsyncClient.ListOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'InstanceAdminRestTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py deleted file mode 100644 index ef9a29eaf8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py +++ /dev/null @@ -1,1152 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseInstanceAdminRestTransport(InstanceAdminTransport): - """Base REST backend transport for InstanceAdmin. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/instances', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.CreateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in
message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/instanceConfigs', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.CreateInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.CreateInstancePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.DeleteInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.DeleteInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.DeleteInstancePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*}:getIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - 
transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.GetInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.GetInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.GetInstancePartitionRequest.pb(request) - 
transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstanceConfigOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/instanceConfigOperations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstanceConfigs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/instanceConfigs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstanceConfigsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstancePartitionOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitionOperations', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstancePartitions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstancePartitionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstances: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/instances', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstancesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstances._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseMoveInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{name=projects/*/instances/*}:move', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.MoveInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseMoveInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/instances/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.UpdateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance_config.name=projects/*/instanceConfigs/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.UpdateInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': 
'/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.UpdateInstancePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def 
__hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseInstanceAdminRestTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py deleted file mode 100644 index 385c06fb99..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .common import ( - OperationProgress, - ReplicaSelection, - FulfillmentPeriod, -) -from .spanner_instance_admin import ( - AutoscalingConfig, - CreateInstanceConfigMetadata, - CreateInstanceConfigRequest, - CreateInstanceMetadata, - CreateInstancePartitionMetadata, - CreateInstancePartitionRequest, - CreateInstanceRequest, - DeleteInstanceConfigRequest, - DeleteInstancePartitionRequest, - DeleteInstanceRequest, - FreeInstanceMetadata, - GetInstanceConfigRequest, - GetInstancePartitionRequest, - GetInstanceRequest, - Instance, - InstanceConfig, - InstancePartition, - ListInstanceConfigOperationsRequest, - ListInstanceConfigOperationsResponse, - ListInstanceConfigsRequest, - ListInstanceConfigsResponse, - ListInstancePartitionOperationsRequest, - ListInstancePartitionOperationsResponse, - ListInstancePartitionsRequest, - ListInstancePartitionsResponse, - ListInstancesRequest, - ListInstancesResponse, - MoveInstanceMetadata, - MoveInstanceRequest, - MoveInstanceResponse, - ReplicaComputeCapacity, - ReplicaInfo, - UpdateInstanceConfigMetadata, - UpdateInstanceConfigRequest, - UpdateInstanceMetadata, - UpdateInstancePartitionMetadata, - UpdateInstancePartitionRequest, - UpdateInstanceRequest, -) - -__all__ = ( - 'OperationProgress', - 'ReplicaSelection', - 'FulfillmentPeriod', - 'AutoscalingConfig', - 'CreateInstanceConfigMetadata', - 'CreateInstanceConfigRequest', - 'CreateInstanceMetadata', - 'CreateInstancePartitionMetadata', - 'CreateInstancePartitionRequest', - 'CreateInstanceRequest', - 'DeleteInstanceConfigRequest', - 'DeleteInstancePartitionRequest', - 'DeleteInstanceRequest', - 'FreeInstanceMetadata', - 'GetInstanceConfigRequest', - 'GetInstancePartitionRequest', - 'GetInstanceRequest', - 'Instance', - 'InstanceConfig', - 'InstancePartition', - 'ListInstanceConfigOperationsRequest', - 'ListInstanceConfigOperationsResponse', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'ListInstancePartitionOperationsRequest', - 'ListInstancePartitionOperationsResponse', - 'ListInstancePartitionsRequest', - 'ListInstancePartitionsResponse', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'MoveInstanceMetadata', - 'MoveInstanceRequest', - 'MoveInstanceResponse', - 'ReplicaComputeCapacity', - 'ReplicaInfo', - 'UpdateInstanceConfigMetadata', - 'UpdateInstanceConfigRequest', - 'UpdateInstanceMetadata', - 'UpdateInstancePartitionMetadata', - 'UpdateInstancePartitionRequest', - 'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py deleted file mode 100644 index 5bc1c6b158..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
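[editor's note] The deleted `types/__init__.py` exists only to re-export every message from `common.py` and `spanner_instance_admin.py` under one namespace and to pin the public surface via `__all__`. Assuming the generated package is installed (it ships inside `google-cloud-spanner`), both import spellings resolve to the same classes:

```python
# Both paths reach the same proto-plus classes, because the package __init__
# re-exports everything listed in __all__. Assumes the published
# spanner_admin_instance_v1 package is installed.
from google.cloud.spanner_admin_instance_v1 import types
from google.cloud.spanner_admin_instance_v1.types import Instance

assert types.Instance is Instance

# __all__ also makes wildcard imports deterministic: only enumerated names
# are exported, nothing incidental.
print(sorted(name for name in types.__all__ if name.startswith("Move")))
# ['MoveInstanceMetadata', 'MoveInstanceRequest', 'MoveInstanceResponse']
```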
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.instance.v1', - manifest={ - 'FulfillmentPeriod', - 'OperationProgress', - 'ReplicaSelection', - }, -) - - -class FulfillmentPeriod(proto.Enum): - r"""Indicates the expected fulfillment period of an operation. - - Values: - FULFILLMENT_PERIOD_UNSPECIFIED (0): - Not specified. - FULFILLMENT_PERIOD_NORMAL (1): - Normal fulfillment period. The operation is - expected to complete within minutes. - FULFILLMENT_PERIOD_EXTENDED (2): - Extended fulfillment period. It can take up - to an hour for the operation to complete. - """ - FULFILLMENT_PERIOD_UNSPECIFIED = 0 - FULFILLMENT_PERIOD_NORMAL = 1 - FULFILLMENT_PERIOD_EXTENDED = 2 - - -class OperationProgress(proto.Message): - r"""Encapsulates progress related information for a Cloud Spanner - long running instance operations. - - Attributes: - progress_percent (int): - Percent completion of the operation. - Values are between 0 and 100 inclusive. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time the request was received. - end_time (google.protobuf.timestamp_pb2.Timestamp): - If set, the time at which this operation - failed or was completed successfully. - """ - - progress_percent: int = proto.Field( - proto.INT32, - number=1, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class ReplicaSelection(proto.Message): - r"""ReplicaSelection identifies replicas with common properties. - - Attributes: - location (str): - Required. Name of the location of the - replicas (e.g., "us-central1"). - """ - - location: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py deleted file mode 100644 index b057141470..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py +++ /dev/null @@ -1,2366 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
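[editor's note] The `common.py` module deleted above defines plain proto-plus wrappers. A quick sketch of how such messages behave, assuming the generated package is installed: they construct like dataclasses, coerce `datetime` into `Timestamp` fields, and serialize through classmethods proto-plus adds to each type.

```python
import datetime

from google.cloud.spanner_admin_instance_v1.types import common

# Keyword-style construction; Timestamp fields accept plain datetimes.
progress = common.OperationProgress(
    progress_percent=42,
    start_time=datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc),
)

# proto.Enum subclasses IntEnum, so names and ints both work.
period = common.FulfillmentPeriod.FULFILLMENT_PERIOD_NORMAL
print(period.name, int(period))  # FULFILLMENT_PERIOD_NORMAL 1

# Serialization helpers are classmethods on the wrapper type.
as_json = common.OperationProgress.to_json(progress)
round_trip = common.OperationProgress.from_json(as_json)
assert round_trip.progress_percent == 42
```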
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import common -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.instance.v1', - manifest={ - 'ReplicaInfo', - 'InstanceConfig', - 'ReplicaComputeCapacity', - 'AutoscalingConfig', - 'Instance', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'GetInstanceConfigRequest', - 'CreateInstanceConfigRequest', - 'UpdateInstanceConfigRequest', - 'DeleteInstanceConfigRequest', - 'ListInstanceConfigOperationsRequest', - 'ListInstanceConfigOperationsResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'CreateInstanceMetadata', - 'UpdateInstanceMetadata', - 'FreeInstanceMetadata', - 'CreateInstanceConfigMetadata', - 'UpdateInstanceConfigMetadata', - 'InstancePartition', - 'CreateInstancePartitionMetadata', - 'CreateInstancePartitionRequest', - 'DeleteInstancePartitionRequest', - 'GetInstancePartitionRequest', - 'UpdateInstancePartitionRequest', - 'UpdateInstancePartitionMetadata', - 'ListInstancePartitionsRequest', - 'ListInstancePartitionsResponse', - 'ListInstancePartitionOperationsRequest', - 'ListInstancePartitionOperationsResponse', - 'MoveInstanceRequest', - 'MoveInstanceResponse', - 'MoveInstanceMetadata', - }, -) - - -class ReplicaInfo(proto.Message): - r""" - - Attributes: - location (str): - The location of the serving resources, e.g., - "us-central1". - type_ (google.cloud.spanner_admin_instance_v1.types.ReplicaInfo.ReplicaType): - The type of replica. - default_leader_location (bool): - If true, this location is designated as the default leader - location where leader replicas are placed. See the `region - types - documentation `__ - for more details. - """ - class ReplicaType(proto.Enum): - r"""Indicates the type of replica. See the `replica types - documentation `__ - for more details. - - Values: - TYPE_UNSPECIFIED (0): - Not specified. - READ_WRITE (1): - Read-write replicas support both reads and writes. These - replicas: - - - Maintain a full copy of your data. - - Serve reads. - - Can vote whether to commit a write. - - Participate in leadership election. - - Are eligible to become a leader. - READ_ONLY (2): - Read-only replicas only support reads (not writes). - Read-only replicas: - - - Maintain a full copy of your data. - - Serve reads. - - Do not participate in voting to commit writes. - - Are not eligible to become a leader. - WITNESS (3): - Witness replicas don't support reads but do participate in - voting to commit writes. Witness replicas: - - - Do not maintain a full copy of data. - - Do not serve reads. - - Vote whether to commit writes. - - Participate in leader election but are not eligible to - become leader. - """ - TYPE_UNSPECIFIED = 0 - READ_WRITE = 1 - READ_ONLY = 2 - WITNESS = 3 - - location: str = proto.Field( - proto.STRING, - number=1, - ) - type_: ReplicaType = proto.Field( - proto.ENUM, - number=2, - enum=ReplicaType, - ) - default_leader_location: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class InstanceConfig(proto.Message): - r"""A possible configuration for a Cloud Spanner instance. 
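[editor's note] `ReplicaInfo.ReplicaType`, deleted above, is a nested enum on the message class. A hedged sketch of reading and comparing it (field is named `type_` because `type` is a Python builtin):

```python
from google.cloud.spanner_admin_instance_v1.types import (
    spanner_instance_admin as admin,
)

replica = admin.ReplicaInfo(
    location="us-central1",
    type_=admin.ReplicaInfo.ReplicaType.READ_WRITE,
    default_leader_location=True,
)

# Nested enums live on the message class; wire values round-trip as ints.
assert admin.ReplicaInfo.ReplicaType(1) is admin.ReplicaInfo.ReplicaType.READ_WRITE

# Per the docstrings: read-write and read-only replicas serve reads,
# witness replicas only vote on commits.
serves_reads = replica.type_ in (
    admin.ReplicaInfo.ReplicaType.READ_WRITE,
    admin.ReplicaInfo.ReplicaType.READ_ONLY,
)
print(serves_reads)  # True
```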
- Configurations define the geographic placement of nodes and - their replication. - - Attributes: - name (str): - A unique identifier for the instance configuration. Values - are of the form - ``projects//instanceConfigs/[a-z][-a-z0-9]*``. - - User instance configuration must start with ``custom-``. - display_name (str): - The name of this instance configuration as it - appears in UIs. - config_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.Type): - Output only. Whether this instance - configuration is a Google-managed or - user-managed configuration. - replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]): - The geographic placement of nodes in this instance - configuration and their replication properties. - - To create user-managed configurations, input ``replicas`` - must include all replicas in ``replicas`` of the - ``base_config`` and include one or more replicas in the - ``optional_replicas`` of the ``base_config``. - optional_replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]): - Output only. The available optional replicas - to choose from for user-managed configurations. - Populated for Google-managed configurations. - base_config (str): - Base configuration name, e.g. - projects//instanceConfigs/nam3, based on which - this configuration is created. Only set for user-managed - configurations. ``base_config`` must refer to a - configuration of type ``GOOGLE_MANAGED`` in the same project - as this configuration. - labels (MutableMapping[str, str]): - Cloud Labels are a flexible and lightweight mechanism for - organizing cloud resources into groups that reflect a - customer's organizational needs and deployment strategies. - Cloud Labels can be used to filter collections of resources. - They can be used to control how resource metrics are - aggregated. And they can be used as arguments to policy - management rules (e.g. route, firewall, load balancing, - etc.). - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z][a-z0-9_-]{0,62}``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``[a-z0-9_-]{0,63}``. - - No more than 64 labels can be associated with a given - resource. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - - If you plan to use labels in your own code, please note that - additional characters may be allowed in the future. - Therefore, you are advised to use an internal label - representation, such as JSON, which doesn't rely upon - specific characters being disallowed. For example, - representing labels as the string: name + "*" + value would - prove problematic if we were to allow "*" in a future - release. - etag (str): - etag is used for optimistic concurrency - control as a way to help prevent simultaneous - updates of a instance configuration from - overwriting each other. It is strongly suggested - that systems make use of the etag in the - read-modify-write cycle to perform instance - configuration updates in order to avoid race - conditions: An etag is returned in the response - which contains instance configurations, and - systems are expected to put that etag in the - request to update instance configuration to - ensure that their change is applied to the same - version of the instance configuration. 
If no - etag is provided in the call to update the - instance configuration, then the existing - instance configuration is overwritten blindly. - leader_options (MutableSequence[str]): - Allowed values of the "default_leader" schema option for - databases in instances that use this instance configuration. - reconciling (bool): - Output only. If true, the instance - configuration is being created or updated. If - false, there are no ongoing operations for the - instance configuration. - state (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.State): - Output only. The current instance configuration state. - Applicable only for ``USER_MANAGED`` configurations. - free_instance_availability (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.FreeInstanceAvailability): - Output only. Describes whether free instances - are available to be created in this instance - configuration. - quorum_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.QuorumType): - Output only. The ``QuorumType`` of the instance - configuration. - storage_limit_per_processing_unit (int): - Output only. The storage limit in bytes per - processing unit. - """ - class Type(proto.Enum): - r"""The type of this configuration. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified. - GOOGLE_MANAGED (1): - Google-managed configuration. - USER_MANAGED (2): - User-managed configuration. - """ - TYPE_UNSPECIFIED = 0 - GOOGLE_MANAGED = 1 - USER_MANAGED = 2 - - class State(proto.Enum): - r"""Indicates the current state of the instance configuration. - - Values: - STATE_UNSPECIFIED (0): - Not specified. - CREATING (1): - The instance configuration is still being - created. - READY (2): - The instance configuration is fully created - and ready to be used to create instances. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - class FreeInstanceAvailability(proto.Enum): - r"""Describes the availability for free instances to be created - in an instance configuration. - - Values: - FREE_INSTANCE_AVAILABILITY_UNSPECIFIED (0): - Not specified. - AVAILABLE (1): - Indicates that free instances are available - to be created in this instance configuration. - UNSUPPORTED (2): - Indicates that free instances are not - supported in this instance configuration. - DISABLED (3): - Indicates that free instances are currently - not available to be created in this instance - configuration. - QUOTA_EXCEEDED (4): - Indicates that additional free instances - cannot be created in this instance configuration - because the project has reached its limit of - free instances. - """ - FREE_INSTANCE_AVAILABILITY_UNSPECIFIED = 0 - AVAILABLE = 1 - UNSUPPORTED = 2 - DISABLED = 3 - QUOTA_EXCEEDED = 4 - - class QuorumType(proto.Enum): - r"""Indicates the quorum type of this instance configuration. - - Values: - QUORUM_TYPE_UNSPECIFIED (0): - Quorum type not specified. - REGION (1): - An instance configuration tagged with ``REGION`` quorum type - forms a write quorum in a single region. - DUAL_REGION (2): - An instance configuration tagged with the ``DUAL_REGION`` - quorum type forms a write quorum with exactly two read-write - regions in a multi-region configuration. - - This instance configuration requires failover in the event - of regional failures. - MULTI_REGION (3): - An instance configuration tagged with the ``MULTI_REGION`` - quorum type forms a write quorum from replicas that are - spread across more than one region in a multi-region - configuration. 
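[editor's note] The etag paragraph above prescribes a read-modify-write cycle. A hedged sketch against the generated `InstanceAdminClient` (assumes the package is installed and application default credentials are configured; resource names are hypothetical). Per `UpdateInstanceConfigRequest`, only `display_name` and `labels` are updatable:

```python
from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
from google.cloud.spanner_admin_instance_v1.types import (
    spanner_instance_admin as admin,
)
from google.protobuf import field_mask_pb2

client = InstanceAdminClient()

# 1. Read: the response carries the current etag.
config = client.get_instance_config(
    name="projects/my-project/instanceConfigs/custom-config"  # hypothetical
)

# 2. Modify locally, keeping the etag we read.
config.display_name = "Renamed custom config"

# 3. Write back with the etag still set; the server rejects the update if the
#    config changed underneath us, instead of overwriting it blindly.
operation = client.update_instance_config(
    request=admin.UpdateInstanceConfigRequest(
        instance_config=config,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
)
operation.result()  # block until the long-running update completes
```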
- """ - QUORUM_TYPE_UNSPECIFIED = 0 - REGION = 1 - DUAL_REGION = 2 - MULTI_REGION = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - config_type: Type = proto.Field( - proto.ENUM, - number=5, - enum=Type, - ) - replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ReplicaInfo', - ) - optional_replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='ReplicaInfo', - ) - base_config: str = proto.Field( - proto.STRING, - number=7, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - etag: str = proto.Field( - proto.STRING, - number=9, - ) - leader_options: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - reconciling: bool = proto.Field( - proto.BOOL, - number=10, - ) - state: State = proto.Field( - proto.ENUM, - number=11, - enum=State, - ) - free_instance_availability: FreeInstanceAvailability = proto.Field( - proto.ENUM, - number=12, - enum=FreeInstanceAvailability, - ) - quorum_type: QuorumType = proto.Field( - proto.ENUM, - number=18, - enum=QuorumType, - ) - storage_limit_per_processing_unit: int = proto.Field( - proto.INT64, - number=19, - ) - - -class ReplicaComputeCapacity(proto.Message): - r"""ReplicaComputeCapacity describes the amount of server - resources that are allocated to each replica identified by the - replica selection. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection): - Required. Identifies replicas by specified - properties. All replicas in the selection have - the same amount of compute capacity. - node_count (int): - The number of nodes allocated to each replica. - - This may be zero in API responses for instances that are not - yet in state ``READY``. - - This field is a member of `oneof`_ ``compute_capacity``. - processing_units (int): - The number of processing units allocated to each replica. - - This may be zero in API responses for instances that are not - yet in state ``READY``. - - This field is a member of `oneof`_ ``compute_capacity``. - """ - - replica_selection: common.ReplicaSelection = proto.Field( - proto.MESSAGE, - number=1, - message=common.ReplicaSelection, - ) - node_count: int = proto.Field( - proto.INT32, - number=2, - oneof='compute_capacity', - ) - processing_units: int = proto.Field( - proto.INT32, - number=3, - oneof='compute_capacity', - ) - - -class AutoscalingConfig(proto.Message): - r"""Autoscaling configuration for an instance. - - Attributes: - autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): - Required. Autoscaling limits for an instance. - autoscaling_targets (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingTargets): - Required. The autoscaling targets for an - instance. - asymmetric_autoscaling_options (MutableSequence[google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption]): - Optional. Optional asymmetric autoscaling - options. 
Replicas matching the replica selection - criteria will be autoscaled independently from - other replicas. The autoscaler will scale the - replicas based on the utilization of replicas - identified by the replica selection. Replica - selections should not overlap with each other. - - Other replicas (those do not match any replica - selection) will be autoscaled together and will - have the same compute capacity allocated to - them. - """ - - class AutoscalingLimits(proto.Message): - r"""The autoscaling limits for the instance. Users can define the - minimum and maximum compute capacity allocated to the instance, and - the autoscaler will only scale within that range. Users can either - use nodes or processing units to specify the limits, but should use - the same unit to set both the min_limit and max_limit. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - min_nodes (int): - Minimum number of nodes allocated to the - instance. If set, this number should be greater - than or equal to 1. - - This field is a member of `oneof`_ ``min_limit``. - min_processing_units (int): - Minimum number of processing units allocated - to the instance. If set, this number should be - multiples of 1000. - - This field is a member of `oneof`_ ``min_limit``. - max_nodes (int): - Maximum number of nodes allocated to the instance. If set, - this number should be greater than or equal to min_nodes. - - This field is a member of `oneof`_ ``max_limit``. - max_processing_units (int): - Maximum number of processing units allocated to the - instance. If set, this number should be multiples of 1000 - and be greater than or equal to min_processing_units. - - This field is a member of `oneof`_ ``max_limit``. - """ - - min_nodes: int = proto.Field( - proto.INT32, - number=1, - oneof='min_limit', - ) - min_processing_units: int = proto.Field( - proto.INT32, - number=2, - oneof='min_limit', - ) - max_nodes: int = proto.Field( - proto.INT32, - number=3, - oneof='max_limit', - ) - max_processing_units: int = proto.Field( - proto.INT32, - number=4, - oneof='max_limit', - ) - - class AutoscalingTargets(proto.Message): - r"""The autoscaling targets for an instance. - - Attributes: - high_priority_cpu_utilization_percent (int): - Required. The target high priority cpu utilization - percentage that the autoscaler should be trying to achieve - for the instance. This number is on a scale from 0 (no - utilization) to 100 (full utilization). The valid range is - [10, 90] inclusive. - storage_utilization_percent (int): - Required. The target storage utilization percentage that the - autoscaler should be trying to achieve for the instance. - This number is on a scale from 0 (no utilization) to 100 - (full utilization). The valid range is [10, 99] inclusive. - """ - - high_priority_cpu_utilization_percent: int = proto.Field( - proto.INT32, - number=1, - ) - storage_utilization_percent: int = proto.Field( - proto.INT32, - number=2, - ) - - class AsymmetricAutoscalingOption(proto.Message): - r"""AsymmetricAutoscalingOption specifies the scaling of replicas - identified by the given selection. - - Attributes: - replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection): - Required. 
Selects the replicas to which this - AsymmetricAutoscalingOption applies. Only - read-only replicas are supported. - overrides (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides): - Optional. Overrides applied to the top-level - autoscaling configuration for the selected - replicas. - """ - - class AutoscalingConfigOverrides(proto.Message): - r"""Overrides the top-level autoscaling configuration for the replicas - identified by ``replica_selection``. All fields in this message are - optional. Any unspecified fields will use the corresponding values - from the top-level autoscaling configuration. - - Attributes: - autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): - Optional. If specified, overrides the min/max - limit in the top-level autoscaling configuration - for the selected replicas. - autoscaling_target_high_priority_cpu_utilization_percent (int): - Optional. If specified, overrides the autoscaling target - high_priority_cpu_utilization_percent in the top-level - autoscaling configuration for the selected replicas. - """ - - autoscaling_limits: 'AutoscalingConfig.AutoscalingLimits' = proto.Field( - proto.MESSAGE, - number=1, - message='AutoscalingConfig.AutoscalingLimits', - ) - autoscaling_target_high_priority_cpu_utilization_percent: int = proto.Field( - proto.INT32, - number=2, - ) - - replica_selection: common.ReplicaSelection = proto.Field( - proto.MESSAGE, - number=1, - message=common.ReplicaSelection, - ) - overrides: 'AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides' = proto.Field( - proto.MESSAGE, - number=2, - message='AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides', - ) - - autoscaling_limits: AutoscalingLimits = proto.Field( - proto.MESSAGE, - number=1, - message=AutoscalingLimits, - ) - autoscaling_targets: AutoscalingTargets = proto.Field( - proto.MESSAGE, - number=2, - message=AutoscalingTargets, - ) - asymmetric_autoscaling_options: MutableSequence[AsymmetricAutoscalingOption] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=AsymmetricAutoscalingOption, - ) - - -class Instance(proto.Message): - r"""An isolated set of Cloud Spanner resources on which databases - can be hosted. - - Attributes: - name (str): - Required. A unique identifier for the instance, which cannot - be changed after the instance is created. Values are of the - form - ``projects//instances/[a-z][-a-z0-9]*[a-z0-9]``. - The final segment of the name must be between 2 and 64 - characters in length. - config (str): - Required. The name of the instance's configuration. Values - are of the form - ``projects//instanceConfigs/``. See - also - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - and - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - display_name (str): - Required. The descriptive name for this - instance as it appears in UIs. Must be unique - per project and between 4 and 30 characters in - length. - node_count (int): - The number of nodes allocated to this instance. At most, one - of either ``node_count`` or ``processing_units`` should be - present in the message. - - Users can set the ``node_count`` field to specify the target - number of nodes allocated to the instance. - - If autoscaling is enabled, ``node_count`` is treated as an - ``OUTPUT_ONLY`` field and reflects the current number of - nodes allocated to the instance. 
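[editor's note] Pulling the autoscaling pieces above together: limits are a oneof pair (same unit for both bounds, processing units in multiples of 1000), both targets are required within the documented [10, 90] and [10, 99] ranges, and asymmetric options scale selected read-only replica locations independently. A hedged construction sketch:

```python
from google.cloud.spanner_admin_instance_v1.types import (
    common,
    spanner_instance_admin as admin,
)

autoscaling = admin.AutoscalingConfig(
    # min/max limits are oneofs: nodes OR processing units, same unit for both.
    autoscaling_limits=admin.AutoscalingConfig.AutoscalingLimits(
        min_processing_units=1000,
        max_processing_units=10000,
    ),
    # Required targets; documented ranges are [10, 90] CPU and [10, 99] storage.
    autoscaling_targets=admin.AutoscalingConfig.AutoscalingTargets(
        high_priority_cpu_utilization_percent=65,
        storage_utilization_percent=90,
    ),
    # Optionally scale one replica location independently of the rest.
    asymmetric_autoscaling_options=[
        admin.AutoscalingConfig.AsymmetricAutoscalingOption(
            replica_selection=common.ReplicaSelection(location="europe-west1"),
            overrides=admin.AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides(
                autoscaling_target_high_priority_cpu_utilization_percent=45,
            ),
        )
    ],
)
print(admin.AutoscalingConfig.to_json(autoscaling))
```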
- - This might be zero in API responses for instances that are - not yet in the ``READY`` state. - - For more information, see `Compute capacity, nodes, and - processing - units `__. - processing_units (int): - The number of processing units allocated to this instance. - At most, one of either ``processing_units`` or - ``node_count`` should be present in the message. - - Users can set the ``processing_units`` field to specify the - target number of processing units allocated to the instance. - - If autoscaling is enabled, ``processing_units`` is treated - as an ``OUTPUT_ONLY`` field and reflects the current number - of processing units allocated to the instance. - - This might be zero in API responses for instances that are - not yet in the ``READY`` state. - - For more information, see `Compute capacity, nodes and - processing - units `__. - replica_compute_capacity (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaComputeCapacity]): - Output only. Lists the compute capacity per - ReplicaSelection. A replica selection identifies - a set of replicas with common properties. - Replicas identified by a ReplicaSelection are - scaled with the same compute capacity. - autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig): - Optional. The autoscaling configuration. Autoscaling is - enabled if this field is set. When autoscaling is enabled, - node_count and processing_units are treated as OUTPUT_ONLY - fields and reflect the current compute capacity allocated to - the instance. - state (google.cloud.spanner_admin_instance_v1.types.Instance.State): - Output only. The current instance state. For - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance], - the state must be either omitted or set to ``CREATING``. For - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance], - the state must be either omitted or set to ``READY``. - labels (MutableMapping[str, str]): - Cloud Labels are a flexible and lightweight mechanism for - organizing cloud resources into groups that reflect a - customer's organizational needs and deployment strategies. - Cloud Labels can be used to filter collections of resources. - They can be used to control how resource metrics are - aggregated. And they can be used as arguments to policy - management rules (e.g. route, firewall, load balancing, - etc.). - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z][a-z0-9_-]{0,62}``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``[a-z0-9_-]{0,63}``. - - No more than 64 labels can be associated with a given - resource. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - - If you plan to use labels in your own code, please note that - additional characters may be allowed in the future. And so - you are advised to use an internal label representation, - such as JSON, which doesn't rely upon specific characters - being disallowed. For example, representing labels as the - string: name + "*" + value would prove problematic if we - were to allow "*" in a future release. - instance_type (google.cloud.spanner_admin_instance_v1.types.Instance.InstanceType): - The ``InstanceType`` of the current instance. - endpoint_uris (MutableSequence[str]): - Deprecated. This field is not populated. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The time at which the instance - was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time at which the instance - was most recently updated. - free_instance_metadata (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata): - Free instance metadata. Only populated for - free instances. - edition (google.cloud.spanner_admin_instance_v1.types.Instance.Edition): - Optional. The ``Edition`` of the current instance. - default_backup_schedule_type (google.cloud.spanner_admin_instance_v1.types.Instance.DefaultBackupScheduleType): - Optional. Controls the default backup schedule behavior for - new databases within the instance. By default, a backup - schedule is created automatically when a new database is - created in a new instance. - - Note that the ``AUTOMATIC`` value isn't permitted for free - instances, as backups and backup schedules aren't supported - for free instances. - - In the ``GetInstance`` or ``ListInstances`` response, if the - value of ``default_backup_schedule_type`` isn't set, or set - to ``NONE``, Spanner doesn't create a default backup - schedule for new databases in the instance. - """ - class State(proto.Enum): - r"""Indicates the current state of the instance. - - Values: - STATE_UNSPECIFIED (0): - Not specified. - CREATING (1): - The instance is still being created. - Resources may not be available yet, and - operations such as database creation may not - work. - READY (2): - The instance is fully created and ready to do - work such as creating databases. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - class InstanceType(proto.Enum): - r"""The type of this instance. The type can be used to distinguish - product variants, that can affect aspects like: usage restrictions, - quotas and billing. Currently this is used to distinguish - FREE_INSTANCE vs PROVISIONED instances. - - Values: - INSTANCE_TYPE_UNSPECIFIED (0): - Not specified. - PROVISIONED (1): - Provisioned instances have dedicated - resources, standard usage limits and support. - FREE_INSTANCE (2): - Free instances provide no guarantee for dedicated resources, - [node_count, processing_units] should be 0. They come with - stricter usage limits and limited support. - """ - INSTANCE_TYPE_UNSPECIFIED = 0 - PROVISIONED = 1 - FREE_INSTANCE = 2 - - class Edition(proto.Enum): - r"""The edition selected for this instance. Different editions - provide different capabilities at different price points. - - Values: - EDITION_UNSPECIFIED (0): - Edition not specified. - STANDARD (1): - Standard edition. - ENTERPRISE (2): - Enterprise edition. - ENTERPRISE_PLUS (3): - Enterprise Plus edition. - """ - EDITION_UNSPECIFIED = 0 - STANDARD = 1 - ENTERPRISE = 2 - ENTERPRISE_PLUS = 3 - - class DefaultBackupScheduleType(proto.Enum): - r"""Indicates the `default backup - schedule `__ - behavior for new databases within the instance. - - Values: - DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED (0): - Not specified. - NONE (1): - A default backup schedule isn't created - automatically when a new database is created in - the instance. - AUTOMATIC (2): - A default backup schedule is created - automatically when a new database is created in - the instance. The default backup schedule - creates a full backup every 24 hours. These full - backups are retained for 7 days. You can edit or - delete the default backup schedule once it's - created. 
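[editor's note] The label rules quoted in the `labels` docstrings above (key regex `[a-z][a-z0-9_-]{0,62}`, value regex `[a-z0-9_-]{0,63}`, at most 64 labels per resource) are easy to check client-side before sending a request. A purely illustrative validator; the service performs the authoritative validation:

```python
import re

# Documented constraints from the Instance/InstanceConfig labels docstrings.
_KEY_RE = re.compile(r"^[a-z][a-z0-9_-]{0,62}$")
_VALUE_RE = re.compile(r"^[a-z0-9_-]{0,63}$")


def validate_labels(labels: dict) -> None:
    """Raise ValueError if labels violate the documented constraints."""
    if len(labels) > 64:
        raise ValueError("no more than 64 labels per resource")
    for key, value in labels.items():
        if not _KEY_RE.match(key):
            raise ValueError(f"invalid label key: {key!r}")
        if not _VALUE_RE.match(value):
            raise ValueError(f"invalid label value: {value!r}")


validate_labels({"env": "dev", "team": "storage"})  # ok
try:
    validate_labels({"Env": "dev"})  # uppercase key rejected
except ValueError as exc:
    print(exc)
```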
- """ - DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED = 0 - NONE = 1 - AUTOMATIC = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: str = proto.Field( - proto.STRING, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - node_count: int = proto.Field( - proto.INT32, - number=5, - ) - processing_units: int = proto.Field( - proto.INT32, - number=9, - ) - replica_compute_capacity: MutableSequence['ReplicaComputeCapacity'] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message='ReplicaComputeCapacity', - ) - autoscaling_config: 'AutoscalingConfig' = proto.Field( - proto.MESSAGE, - number=17, - message='AutoscalingConfig', - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - instance_type: InstanceType = proto.Field( - proto.ENUM, - number=10, - enum=InstanceType, - ) - endpoint_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - free_instance_metadata: 'FreeInstanceMetadata' = proto.Field( - proto.MESSAGE, - number=13, - message='FreeInstanceMetadata', - ) - edition: Edition = proto.Field( - proto.ENUM, - number=20, - enum=Edition, - ) - default_backup_schedule_type: DefaultBackupScheduleType = proto.Field( - proto.ENUM, - number=23, - enum=DefaultBackupScheduleType, - ) - - -class ListInstanceConfigsRequest(proto.Message): - r"""The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Attributes: - parent (str): - Required. The name of the project for which a list of - supported instance configurations is requested. Values are - of the form ``projects/``. - page_size (int): - Number of instance configurations to be - returned in the response. If 0 or less, defaults - to the server's maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigsResponse.next_page_token] - from a previous - [ListInstanceConfigsResponse][google.spanner.admin.instance.v1.ListInstanceConfigsResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInstanceConfigsResponse(proto.Message): - r"""The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Attributes: - instance_configs (MutableSequence[google.cloud.spanner_admin_instance_v1.types.InstanceConfig]): - The list of requested instance - configurations. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs] - call to fetch more of the matching instance configurations. 
- """ - - @property - def raw_page(self): - return self - - instance_configs: MutableSequence['InstanceConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InstanceConfig', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetInstanceConfigRequest(proto.Message): - r"""The request for - [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - - Attributes: - name (str): - Required. The name of the requested instance configuration. - Values are of the form - ``projects//instanceConfigs/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateInstanceConfigRequest(proto.Message): - r"""The request for - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. - - Attributes: - parent (str): - Required. The name of the project in which to create the - instance configuration. Values are of the form - ``projects/``. - instance_config_id (str): - Required. The ID of the instance configuration to create. - Valid identifiers are of the form - ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 - characters in length. The ``custom-`` prefix is required to - avoid name conflicts with Google-managed configurations. - instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - Required. The ``InstanceConfig`` proto of the configuration - to create. ``instance_config.name`` must be - ``/instanceConfigs/``. - ``instance_config.base_config`` must be a Google-managed - configuration name, e.g. /instanceConfigs/us-east1, - /instanceConfigs/nam3. - validate_only (bool): - An option to validate, but not actually - execute, a request, and provide the same - response. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - instance_config_id: str = proto.Field( - proto.STRING, - number=2, - ) - instance_config: 'InstanceConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InstanceConfig', - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class UpdateInstanceConfigRequest(proto.Message): - r"""The request for - [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. - - Attributes: - instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - Required. The user instance configuration to update, which - must always include the instance configuration name. - Otherwise, only fields mentioned in - [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask] - need be included. To prevent conflicts of concurrent - updates, - [etag][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - can be used. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - should be updated. The field mask must always be specified; - this prevents any future fields in - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - from being erased accidentally by clients that do not know - about them. Only display_name and labels can be updated. - validate_only (bool): - An option to validate, but not actually - execute, a request, and provide the same - response. 
- """ - - instance_config: 'InstanceConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='InstanceConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteInstanceConfigRequest(proto.Message): - r"""The request for - [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. - - Attributes: - name (str): - Required. The name of the instance configuration to be - deleted. Values are of the form - ``projects//instanceConfigs/`` - etag (str): - Used for optimistic concurrency control as a - way to help prevent simultaneous deletes of an - instance configuration from overwriting each - other. If not empty, the API - only deletes the instance configuration when the - etag provided matches the current status of the - requested instance configuration. Otherwise, - deletes the instance configuration without - checking the current status of the requested - instance configuration. - validate_only (bool): - An option to validate, but not actually - execute, a request, and provide the same - response. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class ListInstanceConfigOperationsRequest(proto.Message): - r"""The request for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - Attributes: - parent (str): - Required. The project of the instance configuration - operations. Values are of the form ``projects/``. - filter (str): - An expression that filters the list of returned operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the Operation are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata] - is - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first, if filtering - on metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic. However, you can specify AND, OR, - and NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. 
- - ``(metadata.@type=`` - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata) AND`` - ``(metadata.instance_config.name:custom-config) AND`` - ``(metadata.progress.start_time < \"2021-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Return operations where: - - - The operation's metadata type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - - The instance configuration name contains - "custom-config". - - The operation started before 2021-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse.next_page_token] - from a previous - [ListInstanceConfigOperationsResponse][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListInstanceConfigOperationsResponse(proto.Message): - r"""The response for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - Attributes: - operations (MutableSequence[google.longrunning.operations_pb2.Operation]): - The list of matching instance configuration long-running - operations. Each operation's name will be prefixed by the - name of the instance configuration. The operation's metadata - field type ``metadata.type_url`` describes the type of the - metadata. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetInstanceRequest(proto.Message): - r"""The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - - Attributes: - name (str): - Required. The name of the requested instance. Values are of - the form ``projects//instances/``. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - If field_mask is present, specifies the subset of - [Instance][google.spanner.admin.instance.v1.Instance] fields - that should be returned. If absent, all - [Instance][google.spanner.admin.instance.v1.Instance] fields - are returned. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class CreateInstanceRequest(proto.Message): - r"""The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - - Attributes: - parent (str): - Required. The name of the project in which to create the - instance. Values are of the form ``projects/``. - instance_id (str): - Required. The ID of the instance to create. 
Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and - must be between 2 and 64 characters in length. - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to create. The name may be omitted, - but if specified must be - ``/instances/``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - instance_id: str = proto.Field( - proto.STRING, - number=2, - ) - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=3, - message='Instance', - ) - - -class ListInstancesRequest(proto.Message): - r"""The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Attributes: - parent (str): - Required. The name of the project for which a list of - instances is requested. Values are of the form - ``projects/``. - page_size (int): - Number of instances to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstancesResponse.next_page_token] - from a previous - [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse]. - filter (str): - An expression for filtering the results of the request. - Filter rules are case insensitive. The fields eligible for - filtering are: - - - ``name`` - - ``display_name`` - - ``labels.key`` where key is the name of a label - - Some examples of using filters are: - - - ``name:*`` --> The instance has a name. - - ``name:Howl`` --> The instance's name contains the string - "howl". - - ``name:HOWL`` --> Equivalent to above. - - ``NAME:howl`` --> Equivalent to above. - - ``labels.env:*`` --> The instance has the label "env". - - ``labels.env:dev`` --> The instance has the label "env" - and the value of the label contains the string "dev". - - ``name:howl labels.env:dev`` --> The instance's name - contains "howl" and it has the label "env" with its value - containing "dev". - instance_deadline (google.protobuf.timestamp_pb2.Timestamp): - Deadline used while retrieving metadata for instances. - Instances whose metadata cannot be retrieved within this - deadline will be added to - [unreachable][google.spanner.admin.instance.v1.ListInstancesResponse.unreachable] - in - [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - instance_deadline: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class ListInstancesResponse(proto.Message): - r"""The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Attributes: - instances (MutableSequence[google.cloud.spanner_admin_instance_v1.types.Instance]): - The list of requested instances. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances] - call to fetch more of the matching instances. - unreachable (MutableSequence[str]): - The list of unreachable instances. It includes the names of - instances whose metadata could not be retrieved within - [instance_deadline][google.spanner.admin.instance.v1.ListInstancesRequest.instance_deadline]. 
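[editor's note] The `ListInstances` filter grammar above composes with `instance_deadline`: instances whose metadata misses the deadline are reported in `unreachable` rather than failing the call. A hedged sketch (project hypothetical):

```python
import datetime

from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
from google.cloud.spanner_admin_instance_v1.types import (
    spanner_instance_admin as admin,
)

client = InstanceAdminClient()
request = admin.ListInstancesRequest(
    parent="projects/my-project",  # hypothetical
    # Case-insensitive filter: name contains "howl" AND label env contains "dev".
    filter="name:howl labels.env:dev",
    # Give up on per-instance metadata after ~30 seconds from now...
    instance_deadline=datetime.datetime.now(datetime.timezone.utc)
    + datetime.timedelta(seconds=30),
)

pager = client.list_instances(request=request)
for page in pager.pages:
    for instance in page.instances:
        print(instance.name, instance.state.name)
    # ...instances whose metadata missed the deadline land here instead.
    for name in page.unreachable:
        print("unreachable:", name)
```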
- """ - - @property - def raw_page(self): - return self - - instances: MutableSequence['Instance'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Instance', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateInstanceRequest(proto.Message): - r"""The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to update, which must always include - the instance name. Otherwise, only fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] should - be updated. The field mask must always be specified; this - prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] from - being erased accidentally by clients that do not know about - them. - """ - - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteInstanceRequest(proto.Message): - r"""The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - - Attributes: - name (str): - Required. The name of the instance to be deleted. Values are - of the form ``projects//instances/`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateInstanceMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - The instance being created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod): - The expected fulfillment period of this - create operation. - """ - - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expected_fulfillment_period: common.FulfillmentPeriod = proto.Field( - proto.ENUM, - number=5, - enum=common.FulfillmentPeriod, - ) - - -class UpdateInstanceMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. 
- - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - The desired end state of the update. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod): - The expected fulfillment period of this - update operation. - """ - - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expected_fulfillment_period: common.FulfillmentPeriod = proto.Field( - proto.ENUM, - number=5, - enum=common.FulfillmentPeriod, - ) - - -class FreeInstanceMetadata(proto.Message): - r"""Free instance specific metadata that is kept even after an - instance has been upgraded for tracking purposes. - - Attributes: - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp after which the - instance will either be upgraded or scheduled - for deletion after a grace period. - ExpireBehavior is used to choose between - upgrading or scheduling the free instance for - deletion. This timestamp is set during the - creation of a free instance. - upgrade_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. If present, the timestamp at - which the free instance was upgraded to a - provisioned instance. - expire_behavior (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata.ExpireBehavior): - Specifies the expiration behavior of a free instance. The - default of ExpireBehavior is ``REMOVE_AFTER_GRACE_PERIOD``. - This can be modified during or after creation, and before - expiration. - """ - class ExpireBehavior(proto.Enum): - r"""Allows users to change behavior when a free instance expires. - - Values: - EXPIRE_BEHAVIOR_UNSPECIFIED (0): - Not specified. - FREE_TO_PROVISIONED (1): - When the free instance expires, upgrade the - instance to a provisioned instance. - REMOVE_AFTER_GRACE_PERIOD (2): - When the free instance expires, disable the - instance, and delete it after the grace period - passes if it has not been upgraded. - """ - EXPIRE_BEHAVIOR_UNSPECIFIED = 0 - FREE_TO_PROVISIONED = 1 - REMOVE_AFTER_GRACE_PERIOD = 2 - - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - upgrade_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - expire_behavior: ExpireBehavior = proto.Field( - proto.ENUM, - number=3, - enum=ExpireBehavior, - ) - - -class CreateInstanceConfigMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. 
-
-    Attributes:
-        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-            The target instance configuration end state.
-        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
-            The progress of the
-            [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]
-            operation.
-        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
-            The time at which this operation was
-            cancelled.
-    """
-
-    instance_config: 'InstanceConfig' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='InstanceConfig',
-    )
-    progress: common.OperationProgress = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=common.OperationProgress,
-    )
-    cancel_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class UpdateInstanceConfigMetadata(proto.Message):
-    r"""Metadata type for the operation returned by
-    [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
-
-    Attributes:
-        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-            The desired instance configuration after
-            updating.
-        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
-            The progress of the
-            [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]
-            operation.
-        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
-            The time at which this operation was
-            cancelled.
-    """
-
-    instance_config: 'InstanceConfig' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='InstanceConfig',
-    )
-    progress: common.OperationProgress = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=common.OperationProgress,
-    )
-    cancel_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class InstancePartition(proto.Message):
-    r"""An isolated set of Cloud Spanner resources that databases can
-    define placements on.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        name (str):
-            Required. A unique identifier for the instance partition.
-            Values are of the form
-            ``projects/<project>/instances/<instance>/instancePartitions/[a-z][-a-z0-9]*[a-z0-9]``.
-            The final segment of the name must be between 2 and 64
-            characters in length. An instance partition's name cannot be
-            changed after the instance partition is created.
-        config (str):
-            Required. The name of the instance partition's
-            configuration. Values are of the form
-            ``projects/<project>/instanceConfigs/<configuration>``. See
-            also
-            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
-            and
-            [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
-        display_name (str):
-            Required. The descriptive name for this
-            instance partition as it appears in UIs. Must be
-            unique per project and between 4 and 30
-            characters in length.
-        node_count (int):
-            The number of nodes allocated to this instance partition.
-
-            Users can set the ``node_count`` field to specify the target
-            number of nodes allocated to the instance partition.
-
-            This may be zero in API responses for instance partitions
-            that are not yet in state ``READY``.
-
-            This field is a member of `oneof`_ ``compute_capacity``.
-        processing_units (int):
-            The number of processing units allocated to this instance
-            partition.
-
-            Users can set the ``processing_units`` field to specify the
-            target number of processing units allocated to the instance
-            partition.
-
-            This might be zero in API responses for instance partitions
-            that are not yet in the ``READY`` state.
-
-            This field is a member of `oneof`_ ``compute_capacity``.
-        state (google.cloud.spanner_admin_instance_v1.types.InstancePartition.State):
-            Output only. The current instance partition
-            state.
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time at which the instance
-            partition was created.
-        update_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time at which the instance
-            partition was most recently updated.
-        referencing_databases (MutableSequence[str]):
-            Output only. The names of the databases that
-            reference this instance partition. Referencing
-            databases should share the parent instance. The
-            existence of any referencing database prevents
-            the instance partition from being deleted.
-        referencing_backups (MutableSequence[str]):
-            Output only. Deprecated: This field is not
-            populated. Output only. The names of the backups
-            that reference this instance partition.
-            Referencing backups should share the parent
-            instance. The existence of any referencing
-            backup prevents the instance partition from
-            being deleted.
-        etag (str):
-            Used for optimistic concurrency control as a
-            way to help prevent simultaneous updates of an
-            instance partition from overwriting each other.
-            It is strongly suggested that systems make use
-            of the etag in the read-modify-write cycle to
-            perform instance partition updates in order to
-            avoid race conditions: an etag is returned in
-            the response that contains the instance
-            partition, and systems are expected to put that
-            etag in the request to update the instance
-            partition to ensure that their change is
-            applied to the same version of the instance
-            partition. If no etag is provided in the call to
-            update the instance partition, then the existing
-            instance partition is overwritten blindly.
-    """
-    class State(proto.Enum):
-        r"""Indicates the current state of the instance partition.
-
-        Values:
-            STATE_UNSPECIFIED (0):
-                Not specified.
-            CREATING (1):
-                The instance partition is still being
-                created. Resources may not be available yet, and
-                operations such as creating placements using
-                this instance partition may not work.
-            READY (2):
-                The instance partition is fully created and
-                ready to do work such as creating placements and
-                being used in databases.
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: str = proto.Field( - proto.STRING, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - node_count: int = proto.Field( - proto.INT32, - number=5, - oneof='compute_capacity', - ) - processing_units: int = proto.Field( - proto.INT32, - number=6, - oneof='compute_capacity', - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - referencing_databases: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=10, - ) - referencing_backups: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=11, - ) - etag: str = proto.Field( - proto.STRING, - number=12, - ) - - -class CreateInstancePartitionMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - - Attributes: - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - The instance partition being created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - """ - - instance_partition: 'InstancePartition' = proto.Field( - proto.MESSAGE, - number=1, - message='InstancePartition', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class CreateInstancePartitionRequest(proto.Message): - r"""The request for - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - - Attributes: - parent (str): - Required. The name of the instance in which to create the - instance partition. Values are of the form - ``projects//instances/``. - instance_partition_id (str): - Required. The ID of the instance partition to create. Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and - must be between 2 and 64 characters in length. - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - Required. The instance partition to create. The - instance_partition.name may be omitted, but if specified - must be - ``/instancePartitions/``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - instance_partition_id: str = proto.Field( - proto.STRING, - number=2, - ) - instance_partition: 'InstancePartition' = proto.Field( - proto.MESSAGE, - number=3, - message='InstancePartition', - ) - - -class DeleteInstancePartitionRequest(proto.Message): - r"""The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - - Attributes: - name (str): - Required. The name of the instance partition to be deleted. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` - etag (str): - Optional. If not empty, the API only deletes - the instance partition when the etag provided - matches the current status of the requested - instance partition. Otherwise, deletes the - instance partition without checking the current - status of the requested instance partition. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetInstancePartitionRequest(proto.Message): - r"""The request for - [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. - - Attributes: - name (str): - Required. The name of the requested instance partition. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateInstancePartitionRequest(proto.Message): - r"""The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - - Attributes: - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - Required. The instance partition to update, which must - always include the instance partition name. Otherwise, only - fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] - need be included. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - should be updated. The field mask must always be specified; - this prevents any future fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - from being erased accidentally by clients that do not know - about them. - """ - - instance_partition: 'InstancePartition' = proto.Field( - proto.MESSAGE, - number=1, - message='InstancePartition', - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class UpdateInstancePartitionMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - - Attributes: - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - The desired end state of the update. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. 
-        end_time (google.protobuf.timestamp_pb2.Timestamp):
-            The time at which this operation failed or
-            was completed successfully.
-    """
-
-    instance_partition: 'InstancePartition' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='InstancePartition',
-    )
-    start_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=timestamp_pb2.Timestamp,
-    )
-    cancel_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp_pb2.Timestamp,
-    )
-    end_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class ListInstancePartitionsRequest(proto.Message):
-    r"""The request for
-    [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
-
-    Attributes:
-        parent (str):
-            Required. The instance whose instance partitions should be
-            listed. Values are of the form
-            ``projects/<project>/instances/<instance>``. Use
-            ``{instance} = '-'`` to list instance partitions for all
-            Instances in a project, e.g.,
-            ``projects/myproject/instances/-``.
-        page_size (int):
-            Number of instance partitions to be returned
-            in the response. If 0 or less, defaults to the
-            server's maximum allowed page size.
-        page_token (str):
-            If non-empty, ``page_token`` should contain a
-            [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.next_page_token]
-            from a previous
-            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
-        instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp):
-            Optional. Deadline used while retrieving metadata for
-            instance partitions. Instance partitions whose metadata
-            cannot be retrieved within this deadline will be added to
-            [unreachable][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.unreachable]
-            in
-            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class ListInstancePartitionsResponse(proto.Message):
-    r"""The response for
-    [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
-
-    Attributes:
-        instance_partitions (MutableSequence[google.cloud.spanner_admin_instance_v1.types.InstancePartition]):
-            The list of requested instancePartitions.
-        next_page_token (str):
-            ``next_page_token`` can be sent in a subsequent
-            [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]
-            call to fetch more of the matching instance partitions.
-        unreachable (MutableSequence[str]):
-            The list of unreachable instances or instance partitions. It
-            includes the names of instances or instance partitions whose
-            metadata could not be retrieved within
-            [instance_partition_deadline][google.spanner.admin.instance.v1.ListInstancePartitionsRequest.instance_partition_deadline].
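The ``page_token``/``next_page_token`` bookkeeping above is what the generated pager automates: iterating the return value of ``list_instance_partitions`` transparently requests follow-up pages. A sketch with hypothetical names:

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
        parent="projects/my-project/instances/my-instance",  # hypothetical
        page_size=100,
    )
    for partition in client.list_instance_partitions(request=request):
        print(partition.name)  # the pager follows next_page_token automatically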
- """ - - @property - def raw_page(self): - return self - - instance_partitions: MutableSequence['InstancePartition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InstancePartition', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class ListInstancePartitionOperationsRequest(proto.Message): - r"""The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - Attributes: - parent (str): - Required. The parent instance of the instance partition - operations. Values are of the form - ``projects//instances/``. - filter (str): - Optional. An expression that filters the list of returned - operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the Operation are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata] - is - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first, if filtering - on metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic. However, you can specify AND, OR, - and NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. - - ``(metadata.@type=`` - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata) AND`` - ``(metadata.instance_partition.name:custom-instance-partition) AND`` - ``(metadata.start_time < \"2021-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Return operations where: - - - The operation's metadata type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - - The instance partition name contains - "custom-instance-partition". - - The operation started before 2021-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Optional. Number of operations to be returned - in the response. If 0 or less, defaults to the - server's maximum allowed page size. - page_token (str): - Optional. If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.next_page_token] - from a previous - [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse] - to the same ``parent`` and with the same ``filter``. - instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp): - Optional. Deadline used while retrieving metadata for - instance partition operations. 
-            Instance partitions whose operation metadata cannot be
-            retrieved within this deadline will be added to
-            [unreachable_instance_partitions][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.unreachable_instance_partitions]
-            in
-            [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse].
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=3,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class ListInstancePartitionOperationsResponse(proto.Message):
-    r"""The response for
-    [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
-
-    Attributes:
-        operations (MutableSequence[google.longrunning.operations_pb2.Operation]):
-            The list of matching instance partition long-running
-            operations. Each operation's name will be prefixed by the
-            instance partition's name. The operation's metadata field
-            type ``metadata.type_url`` describes the type of the
-            metadata.
-        next_page_token (str):
-            ``next_page_token`` can be sent in a subsequent
-            [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]
-            call to fetch more of the matching metadata.
-        unreachable_instance_partitions (MutableSequence[str]):
-            The list of unreachable instance partitions. It includes the
-            names of instance partitions whose operation metadata could
-            not be retrieved within
-            [instance_partition_deadline][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.instance_partition_deadline].
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=operations_pb2.Operation,
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    unreachable_instance_partitions: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=3,
-    )
-
-
-class MoveInstanceRequest(proto.Message):
-    r"""The request for
-    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
-
-    Attributes:
-        name (str):
-            Required. The instance to move. Values are of the form
-            ``projects/<project>/instances/<instance>``.
-        target_config (str):
-            Required. The target instance configuration where to move
-            the instance. Values are of the form
-            ``projects/<project>/instanceConfigs/<config>``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    target_config: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class MoveInstanceResponse(proto.Message):
-    r"""The response for
-    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
-
-    """
-
-
-class MoveInstanceMetadata(proto.Message):
-    r"""Metadata type for the operation returned by
-    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
-
-    Attributes:
-        target_config (str):
-            The target instance configuration where to move the
-            instance. Values are of the form
-            ``projects/<project>/instanceConfigs/<config>``.
-        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
-            The progress of the
-            [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]
-            operation.
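A sketch of the filter grammar documented above for ListInstancePartitionOperationsRequest; the ``metadata.@type`` clause must come first when filtering on metadata fields, and parenthesized clauses are AND-ed by default (resource names hypothetical):

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    metadata_type = (
        "type.googleapis.com/"
        "google.spanner.admin.instance.v1.CreateInstancePartitionMetadata"
    )
    request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
        parent="projects/my-project/instances/my-instance",  # hypothetical
        filter=(
            f"(metadata.@type={metadata_type}) AND "
            "(metadata.instance_partition.name:custom-instance-partition) AND "
            "(error:*)"
        ),
    )
    for op in client.list_instance_partition_operations(request=request):
        print(op.name)  # matching long-running operations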
- [progress_percent][google.spanner.admin.instance.v1.OperationProgress.progress_percent] - is reset when cancellation is requested. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. - """ - - target_config: str = proto.Field( - proto.STRING, - number=1, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini b/owl-bot-staging/spanner_admin_instance/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py b/owl-bot-staging/spanner_admin_instance/v1/noxfile.py deleted file mode 100644 index 5918b3e5f7..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", - "3.14", -] - -DEFAULT_PYTHON_VERSION = "3.14" - -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): -# Switch this to Python 3.15 alpha1 -# https://peps.python.org/pep-0790/ -PREVIEW_PYTHON_VERSION = "3.14" - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-spanner-admin-instance" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy - "mypy<1.16.0", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. 
- """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. 
- session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory - # paths to build: - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=PREVIEW_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    # Install dependencies specified in `testing/constraints-X.txt`.
-    session.install(*constraints_deps)
-
-    # Note: If a dependency is added to the `prerel_deps` list,
-    # the `core_dependencies_from_source` list in the `core_deps_from_source`
-    # nox session should also be updated.
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        "grpc-google-iam-v1",
-        "grpcio",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--ignore-installed", dep)
-        # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`
-        # to the dictionary below once this bug is fixed.
-        # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add
-        # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below
-        # once this bug is fixed.
-        package_namespaces = {
-            "google-api-core": "google.api_core",
-            "google-auth": "google.auth",
-            "grpcio": "grpc",
-            "protobuf": "google.protobuf",
-            "proto-plus": "proto",
-        }
-
-        version_namespace = package_namespaces.get(dep)
-
-        print(f"Installed {dep}")
-        if version_namespace:
-            session.run(
-                "python",
-                "-c",
-                f"import {version_namespace}; print({version_namespace}.__version__)",
-            )
-
-    session.run(
-        "py.test",
-        "tests/unit",
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-@nox.parametrize(
-    "protobuf_implementation",
-    ["python", "upb"],
-)
-def core_deps_from_source(session, protobuf_implementation):
-    """Run all tests with core dependencies installed from source
-    rather than pulling the dependencies from PyPI.
-    """
-
-    # Install all dependencies
-    session.install("-e", ".")
-
-    # Install dependencies for the unit test environment
-    unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
-    session.install(*unit_deps_all)
-
-    # Install dependencies for the system test environment
-    system_deps_all = (
-        SYSTEM_TEST_STANDARD_DEPENDENCIES
-        + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
-        + SYSTEM_TEST_EXTRAS
-    )
-    session.install(*system_deps_all)
-
-    # Because we test minimum dependency versions on the minimum Python
-    # version, the first version we test with in the unit tests sessions has a
-    # constraints file containing all dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    # Install dependencies specified in `testing/constraints-X.txt`.
-    session.install(*constraints_deps)
-
-    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and
-    # `grpcio-status` should be added to the list below so that they are installed from source,
-    # rather than PyPI.
-    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be
-    # added to the list below so that it is installed from source, rather than PyPI.
-    # Note: If a dependency is added to the `core_dependencies_from_source` list,
-    # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated.
- core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json deleted file mode 100644 index f58e9794e2..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ /dev/null @@ -1,3450 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.spanner.admin.instance.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-spanner-admin-instance", - "version": "0.0.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_config", - "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "instance_config_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance_config" - }, - "description": "Sample for CreateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_create_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_config_async.py" - }, - { - "canonical": 
true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_config", - "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "instance_config_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance_config" - }, - "description": "Sample for CreateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_create_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "instance_partition_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance_partition" - }, - "description": "Sample for CreateInstancePartition", - "file": "spanner_v1_generated_instance_admin_create_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "instance_partition_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance_partition" - }, - "description": "Sample for CreateInstancePartition", - "file": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance" - }, - "description": "Sample 
for CreateInstance", - "file": "spanner_v1_generated_instance_admin_create_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance" - }, - "description": "Sample for CreateInstance", - "file": "spanner_v1_generated_instance_admin_create_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_config" - 
}, - "description": "Sample for DeleteInstanceConfig", - "file": "spanner_v1_generated_instance_admin_delete_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_config" - }, - "description": "Sample for DeleteInstanceConfig", - "file": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_partition" - }, - "description": "Sample for DeleteInstancePartition", - "file": 
"spanner_v1_generated_instance_admin_delete_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_partition" - }, - "description": "Sample for DeleteInstancePartition", - "file": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "spanner_v1_generated_instance_admin_delete_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"spanner_v1_generated_InstanceAdmin_DeleteInstance_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "spanner_v1_generated_instance_admin_delete_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstance_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_instance_admin_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", - "shortName": "get_instance_config" - }, - "description": "Sample for GetInstanceConfig", - "file": "spanner_v1_generated_instance_admin_get_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", - "shortName": "get_instance_config" - }, - "description": "Sample for GetInstanceConfig", - "file": "spanner_v1_generated_instance_admin_get_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", - "shortName": "get_instance_partition" - }, - "description": "Sample for GetInstancePartition", - "file": "spanner_v1_generated_instance_admin_get_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { 
- "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", - "shortName": "get_instance_partition" - }, - "description": "Sample for GetInstancePartition", - "file": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "spanner_v1_generated_instance_admin_get_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, 
- "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "spanner_v1_generated_instance_admin_get_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_config_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager", - "shortName": "list_instance_config_operations" - }, - "description": "Sample for ListInstanceConfigOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - 
"end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_config_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager", - "shortName": "list_instance_config_operations" - }, - "description": "Sample for ListInstanceConfigOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_configs", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager", - "shortName": "list_instance_configs" - }, - "description": "Sample for ListInstanceConfigs", - "file": "spanner_v1_generated_instance_admin_list_instance_configs_async.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_configs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_configs", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager", - "shortName": "list_instance_configs" - }, - "description": "Sample for ListInstanceConfigs", - "file": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partition_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitionOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager", - "shortName": 
"list_instance_partition_operations" - }, - "description": "Sample for ListInstancePartitionOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partition_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitionOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager", - "shortName": "list_instance_partition_operations" - }, - "description": "Sample for ListInstancePartitionOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partitions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager", - "shortName": "list_instance_partitions" - }, - "description": "Sample for ListInstancePartitions", - "file": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partitions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager", - "shortName": "list_instance_partitions" - }, - "description": "Sample for ListInstancePartitions", - "file": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instances", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "spanner_v1_generated_instance_admin_list_instances_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instances_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instances", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "spanner_v1_generated_instance_admin_list_instances_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instances_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.move_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "MoveInstance" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "move_instance" - }, - "description": "Sample for MoveInstance", - "file": "spanner_v1_generated_instance_admin_move_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_move_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.move_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "MoveInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "move_instance" - }, - "description": "Sample for MoveInstance", - "file": "spanner_v1_generated_instance_admin_move_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_move_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_instance_admin_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", 
- "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" - }, - { - "name": "instance_config", - "type": 
"google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance_config" - }, - "description": "Sample for UpdateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_update_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" - }, - { - "name": "instance_config", - "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance_config" - }, - "description": "Sample for UpdateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_update_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_partition", - "method": { - "fullName": 
"google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance_partition" - }, - "description": "Sample for UpdateInstancePartition", - "file": "spanner_v1_generated_instance_admin_update_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance_partition" - }, - "description": "Sample for UpdateInstancePartition", - "file": "spanner_v1_generated_instance_admin_update_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"spanner_v1_generated_instance_admin_update_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "spanner_v1_generated_instance_admin_update_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "spanner_v1_generated_instance_admin_update_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - 
"start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_sync.py" - } - ] -} diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py deleted file mode 100644 index 74bd640044..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_create_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py deleted file mode 100644 index c3f266e4c4..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_create_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.CreateInstanceConfigRequest( - parent="parent_value", - instance_config_id="instance_config_id_value", - ) - - # Make the request - operation = client.create_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py deleted file mode 100644 index c5b7616534..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_create_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.CreateInstanceConfigRequest( - parent="parent_value", - instance_config_id="instance_config_id_value", - ) - - # Make the request - operation = client.create_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py deleted file mode 100644 index a22765f53f..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_create_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstancePartitionRequest( - parent="parent_value", - instance_partition_id="instance_partition_id_value", - instance_partition=instance_partition, - ) - - # Make the request - operation = client.create_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py deleted file mode 100644 index 5b5f2e0e26..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_create_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstancePartitionRequest( - parent="parent_value", - instance_partition_id="instance_partition_id_value", - instance_partition=instance_partition, - ) - - # Make the request - operation = client.create_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py deleted file mode 100644 index f43c5016b5..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_create_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py deleted file mode 100644 index 262da709aa..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_delete_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py deleted file mode 100644 index df83d9e424..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_delete_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_config(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py deleted file mode 100644 index 9a9c4d7ca1..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_delete_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( - name="name_value", - ) - - # Make the request - client.delete_instance_config(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py deleted file mode 100644 index 78ca44d6c2..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_partition(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py deleted file mode 100644 index 72249ef6c7..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - client.delete_instance_partition(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py deleted file mode 100644 index 613ac6c070..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_delete_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - client.delete_instance(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py deleted file mode 100644 index a0b620ae4f..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py deleted file mode 100644 index cc0d725a03..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py deleted file mode 100644 index 059eb2a078..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_get_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py deleted file mode 100644 index 9adfb51c2e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_get_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_config(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py deleted file mode 100644 index 16e9d3c3c8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_get_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance_config(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py deleted file mode 100644 index 8e84abcf6e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_partition(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py deleted file mode 100644 index d617cbb382..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance_partition(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py deleted file mode 100644 index 4a246a5bf3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_get_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py deleted file mode 100644 index a0580fef7c..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstanceConfigOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_list_instance_config_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_config_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py deleted file mode 100644 index 89213b3a2e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstanceConfigOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py
deleted file mode 100644
index 89213b3a2e..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstanceConfigOperations
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_list_instance_config_operations():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instance_config_operations(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py
deleted file mode 100644
index 651b2f88ae..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstanceConfigs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_list_instance_configs():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instance_configs(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py
deleted file mode 100644
index a0f120277a..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstanceConfigs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_list_instance_configs():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instance_configs(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py
deleted file mode 100644
index 9dedb973f1..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstancePartitionOperations
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_list_instance_partition_operations():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instance_partition_operations(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py
deleted file mode 100644
index b2a7549b29..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstancePartitionOperations
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_list_instance_partition_operations():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instance_partition_operations(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py
deleted file mode 100644
index 56adc152fe..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstancePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_list_instance_partitions():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instance_partitions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py
deleted file mode 100644
index 1e65552fc1..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstancePartitions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_list_instance_partitions():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instance_partitions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync]
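The list samples above iterate the returned pager item by item, and the pager fetches pages lazily as iteration proceeds. A sketch of page-level iteration, using the `pages` attribute exposed by the generated pagers:

    pager = client.list_instance_partitions(request=request)
    # Each page is one ListInstancePartitionsResponse; items can be read per page.
    for page in pager.pages:
        for instance_partition in page.instance_partitions:
            print(instance_partition.name)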
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py
deleted file mode 100644
index abe1a1affa..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstances
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstances_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_list_instances():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstancesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instances(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstances_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py
deleted file mode 100644
index f344baff11..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListInstances
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_ListInstances_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_list_instances():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.ListInstancesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_instances(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_ListInstances_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py
deleted file mode 100644
index ce62120492..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for MoveInstance
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_MoveInstance_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_move_instance():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.MoveInstanceRequest(
-        name="name_value",
-        target_config="target_config_value",
-    )
-
-    # Make the request
-    operation = client.move_instance(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_MoveInstance_async]
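In the long-running-operation samples above, result() blocks until the operation finishes and raises if it failed. A sketch of a bounded wait in the sync case (the 300-second timeout is an illustrative value, not a recommended setting):

    operation = client.move_instance(request=request)
    # Blocks up to 300 seconds; raises on operation failure or timeout.
    response = operation.result(timeout=300)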
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py
deleted file mode 100644
index 4621200e0c..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for MoveInstance
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_MoveInstance_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_move_instance():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.MoveInstanceRequest(
-        name="name_value",
-        target_config="target_config_value",
-    )
-
-    # Make the request
-    operation = client.move_instance(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_MoveInstance_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py
deleted file mode 100644
index 2443f2127d..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SetIamPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-async def sample_set_iam_policy():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.SetIamPolicyRequest(
-        resource="resource_value",
-    )
-
-    # Make the request
-    response = await client.set_iam_policy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py
deleted file mode 100644
index ba6401602f..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SetIamPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-def sample_set_iam_policy():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.SetIamPolicyRequest(
-        resource="resource_value",
-    )
-
-    # Make the request
-    response = client.set_iam_policy(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync]
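The SetIamPolicy samples above populate only the required resource field. A sketch of a fuller request (the resource path, role, and member are illustrative placeholders):

    from google.iam.v1 import iam_policy_pb2, policy_pb2

    request = iam_policy_pb2.SetIamPolicyRequest(
        resource="projects/my-project/instances/my-instance",
        policy=policy_pb2.Policy(
            bindings=[
                # Grant one role to one member on the instance.
                policy_pb2.Binding(
                    role="roles/spanner.admin",
                    members=["user:alice@example.com"],
                )
            ],
        ),
    )
    response = client.set_iam_policy(request=request)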
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py
deleted file mode 100644
index aa0e05dde3..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for TestIamPermissions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-async def sample_test_iam_permissions():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.TestIamPermissionsRequest(
-        resource="resource_value",
-        permissions=['permissions_value1', 'permissions_value2'],
-    )
-
-    # Make the request
-    response = await client.test_iam_permissions(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py
deleted file mode 100644
index 80b2a4dd21..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for TestIamPermissions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-
-def sample_test_iam_permissions():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = iam_policy_pb2.TestIamPermissionsRequest(
-        resource="resource_value",
-        permissions=['permissions_value1', 'permissions_value2'],
-    )
-
-    # Make the request
-    response = client.test_iam_permissions(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py
deleted file mode 100644
index ecabbf5191..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateInstance
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_update_instance():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    instance = spanner_admin_instance_v1.Instance()
-    instance.name = "name_value"
-    instance.config = "config_value"
-    instance.display_name = "display_name_value"
-
-    request = spanner_admin_instance_v1.UpdateInstanceRequest(
-        instance=instance,
-    )
-
-    # Make the request
-    operation = client.update_instance(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py
deleted file mode 100644
index f7ea78401c..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateInstanceConfig
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_update_instance_config():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
-    )
-
-    # Make the request
-    operation = client.update_instance_config(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py
deleted file mode 100644
index 1d184f6c58..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateInstanceConfig
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_update_instance_config():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
-    )
-
-    # Make the request
-    operation = client.update_instance_config(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py
deleted file mode 100644
index 42d3c484f8..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateInstancePartition
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-async def sample_update_instance_partition():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
-    # Initialize request argument(s)
-    instance_partition = spanner_admin_instance_v1.InstancePartition()
-    instance_partition.node_count = 1070
-    instance_partition.name = "name_value"
-    instance_partition.config = "config_value"
-    instance_partition.display_name = "display_name_value"
-
-    request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
-        instance_partition=instance_partition,
-    )
-
-    # Make the request
-    operation = client.update_instance_partition(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py
deleted file mode 100644
index 56cd2760a1..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateInstancePartition
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_update_instance_partition():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    instance_partition = spanner_admin_instance_v1.InstancePartition()
-    instance_partition.node_count = 1070
-    instance_partition.name = "name_value"
-    instance_partition.config = "config_value"
-    instance_partition.display_name = "display_name_value"
-
-    request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
-        instance_partition=instance_partition,
-    )
-
-    # Make the request
-    operation = client.update_instance_partition(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py
deleted file mode 100644
index 2340e701e1..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateInstance
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-instance
-
-
-# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_instance_v1
-
-
-def sample_update_instance():
-    # Create a client
-    client = spanner_admin_instance_v1.InstanceAdminClient()
-
-    # Initialize request argument(s)
-    instance = spanner_admin_instance_v1.Instance()
-    instance.name = "name_value"
-    instance.config = "config_value"
-    instance.display_name = "display_name_value"
-
-    request = spanner_admin_instance_v1.UpdateInstanceRequest(
-        instance=instance,
-    )
-
-    # Make the request
-    operation = client.update_instance(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_sync]
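The update samples above populate only the resource message. In practice an update call is usually paired with a field mask naming the fields to change; a sketch, with an illustrative path list:

    from google.protobuf import field_mask_pb2

    request = spanner_admin_instance_v1.UpdateInstanceRequest(
        instance=instance,
        # Only the fields listed in the mask are applied to the instance.
        field_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )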
diff --git a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py b/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py
deleted file mode 100644
index 8200af5099..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py
+++ /dev/null
@@ -1,196 +0,0 @@
-#! /usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import argparse
-import os
-import libcst as cst
-import pathlib
-import sys
-from typing import (Any, Callable, Dict, List, Sequence, Tuple)
-
-
-def partition(
-    predicate: Callable[[Any], bool],
-    iterator: Sequence[Any]
-) -> Tuple[List[Any], List[Any]]:
-    """A stable, out-of-place partition."""
-    results = ([], [])
-
-    for i in iterator:
-        results[int(predicate(i))].append(i)
-
-    # Returns trueList, falseList
-    return results[1], results[0]
-
-
-class spanner_admin_instanceCallTransformer(cst.CSTTransformer):
-    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
-    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
-        'create_instance': ('parent', 'instance_id', 'instance', ),
-        'create_instance_config': ('parent', 'instance_config_id', 'instance_config', 'validate_only', ),
-        'create_instance_partition': ('parent', 'instance_partition_id', 'instance_partition', ),
-        'delete_instance': ('name', ),
-        'delete_instance_config': ('name', 'etag', 'validate_only', ),
-        'delete_instance_partition': ('name', 'etag', ),
-        'get_iam_policy': ('resource', 'options', ),
-        'get_instance': ('name', 'field_mask', ),
-        'get_instance_config': ('name', ),
-        'get_instance_partition': ('name', ),
-        'list_instance_config_operations': ('parent', 'filter', 'page_size', 'page_token', ),
-        'list_instance_configs': ('parent', 'page_size', 'page_token', ),
-        'list_instance_partition_operations': ('parent', 'filter', 'page_size', 'page_token', 'instance_partition_deadline', ),
-        'list_instance_partitions': ('parent', 'page_size', 'page_token', 'instance_partition_deadline', ),
-        'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'instance_deadline', ),
-        'move_instance': ('name', 'target_config', ),
-        'set_iam_policy': ('resource', 'policy', 'update_mask', ),
-        'test_iam_permissions': ('resource', 'permissions', ),
-        'update_instance': ('instance', 'field_mask', ),
-        'update_instance_config': ('instance_config', 'update_mask', 'validate_only', ),
-        'update_instance_partition': ('instance_partition', 'field_mask', ),
-    }
-
-    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
-        try:
-            key = original.func.attr.value
-            kword_params = self.METHOD_TO_PARAMS[key]
-        except (AttributeError, KeyError):
-            # Either not a method from the API or too convoluted to be sure.
-            return updated
-
-        # If the existing code is valid, keyword args come after positional args.
-        # Therefore, all positional args must map to the first parameters.
-        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
-        if any(k.keyword.value == "request" for k in kwargs):
-            # We've already fixed this file, don't fix it again.
-            return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=spanner_admin_instanceCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the spanner_admin_instance client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also detect false
-      positives when an API method shares a name with another method.
-""")
-    parser.add_argument(
-        '-d',
-        '--input-directory',
-        required=True,
-        dest='input_dir',
-        help='the input directory to walk for python files to fix up',
-    )
-    parser.add_argument(
-        '-o',
-        '--output-directory',
-        required=True,
-        dest='output_dir',
-        help='the directory to output files fixed via un-flattening',
-    )
-    args = parser.parse_args()
-    input_dir = pathlib.Path(args.input_dir)
-    output_dir = pathlib.Path(args.output_dir)
-    if not input_dir.is_dir():
-        print(
-            f"input directory '{input_dir}' does not exist or is not a directory",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    if not output_dir.is_dir():
-        print(
-            f"output directory '{output_dir}' does not exist or is not a directory",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    if os.listdir(output_dir):
-        print(
-            f"output directory '{output_dir}' is not empty",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    fix_files(input_dir, output_dir)
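A sketch of the rewrite the fixup script above performs, using the create_instance entry from its METHOD_TO_PARAMS table (the variable names are illustrative):

    # Before the fixup: flattened positional parameters.
    client.create_instance(parent, instance_id, instance, timeout=30)

    # After the fixup: parameters folded into a single request dict; control
    # parameters (retry, timeout, metadata) remain keyword arguments.
    client.create_instance(
        request={'parent': parent, 'instance_id': instance_id, 'instance': instance},
        timeout=30,
    )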
diff --git a/owl-bot-staging/spanner_admin_instance/v1/setup.py b/owl-bot-staging/spanner_admin_instance/v1/setup.py
deleted file mode 100644
index 8d7bb912c2..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/setup.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import io
-import os
-import re
-
-import setuptools # type: ignore
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-name = 'google-cloud-spanner-admin-instance'
-
-
-description = "Google Cloud Spanner Admin Instance API client library"
-
-version = None
-
-with open(os.path.join(package_root, 'google/cloud/spanner_admin_instance/gapic_version.py')) as fp:
-    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
-    assert len(version_candidates) == 1
-    version = version_candidates[0]
-
-if version[0] == "0":
-    release_status = "Development Status :: 4 - Beta"
-else:
-    release_status = "Development Status :: 5 - Production/Stable"
-
-dependencies = [
-    "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
-    # Exclude incompatible versions of `google-auth`
-    # See https://github.com/googleapis/google-cloud-python/issues/12364
-    "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0",
-    "grpcio >= 1.33.2, < 2.0.0",
-    "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'",
-    "proto-plus >= 1.22.3, <2.0.0",
-    "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'",
-    "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
-    "grpc-google-iam-v1 >= 0.14.0, <1.0.0",
-]
-extras = {
-}
-url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-instance"
-
-readme_filename = os.path.join(package_root, "README.rst")
-with io.open(readme_filename, encoding="utf-8") as readme_file:
-    readme = readme_file.read()
-
-packages = [
-    package
-    for package in setuptools.find_namespace_packages()
-    if package.startswith("google")
-]
-
-setuptools.setup(
-    name=name,
-    version=version,
-    description=description,
-    long_description=readme,
-    author="Google LLC",
-    author_email="googleapis-packages@google.com",
-    license="Apache 2.0",
-    url=url,
-    classifiers=[
-        release_status,
-        "Intended Audience :: Developers",
-        "License :: OSI Approved :: Apache Software License",
-        "Programming Language :: Python",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "Programming Language :: Python :: 3.13",
-        "Programming Language :: Python :: 3.14",
-        "Operating System :: OS Independent",
-        "Topic :: Internet",
-    ],
-    platforms="Posix; MacOS X; Windows",
-    packages=packages,
-    python_requires=">=3.7",
-    install_requires=dependencies,
-    extras_require=extras,
-    include_package_data=True,
-    zip_safe=False,
-)
diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt
deleted file mode 100644
index ef1c92ffff..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
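The `dependencies` list in the setup.py above declares grpcio and proto-plus twice on purpose: the second entry of each pair carries a PEP 508 environment marker, so the stricter version floor only applies on new enough interpreters. A small runnable sketch of how such a marker evaluates, using the third-party `packaging` library (values are illustrative):

from packaging.requirements import Requirement

req = Requirement("grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'")
print(req.name, str(req.specifier))                     # grpcio and its version range
print(req.marker.evaluate({"python_version": "3.13"}))  # False: floor does not apply
print(req.marker.evaluate({"python_version": "3.14"}))  # True: floor applies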
-google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt deleted file mode 100644 index 2ae5a677e8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,13 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt deleted file mode 100644 index 2ae5a677e8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt +++ /dev/null @@ -1,13 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt deleted file mode 100644 index 5b1ee6c35a..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,13 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
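The constraints-3.7.txt file above pins every dependency to the lower bound declared in setup.py, so CI can verify that the oldest supported versions still work. A hypothetical helper (not part of this package) showing how such pins could be derived from the requirement strings, again using the `packaging` library:

from packaging.requirements import Requirement

def lower_bound_pin(requirement_line):
    # Turn the ">=" clause of a requirement into an exact "==" pin.
    req = Requirement(requirement_line)
    for spec in req.specifier:
        if spec.operator == ">=":
            return "{}=={}".format(req.name, spec.version)
    return None

print(lower_bound_pin("google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0"))
# google-auth==2.14.1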
-# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py deleted file mode 100644 index 82c9940cf7..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py +++ /dev/null @@ -1,17636 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
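The deleted test module that begins here never opens a network connection: each test patches `__call__` on the type of the transport's method object, then asserts on the captured request and a canned response. A stripped-down sketch of that pattern (`_Stub` is a hypothetical stand-in, not a real gRPC class):

from unittest import mock

class _Stub:
    # Stand-in for a gRPC multicallable: invoking it would send the RPC.
    def __call__(self, request):
        raise RuntimeError("would hit the network")

stub = _Stub()
with mock.patch.object(type(stub), '__call__') as call:
    call.return_value = {"next_page_token": "tok"}  # canned response
    response = stub({"parent": "projects/p"})       # no network I/O
    assert call.called
    assert response == {"next_page_token": "tok"}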
-#
-import os
-import re
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable, AsyncIterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-
-try:
-    from google.auth.aio import credentials as ga_credentials_async
-    HAS_GOOGLE_AUTH_AIO = True
-except ImportError:  # pragma: NO COVER
-    HAS_GOOGLE_AUTH_AIO = False
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async  # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminAsyncClient
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminClient
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports
-from google.cloud.spanner_admin_instance_v1.types import common
-from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import options_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.oauth2 import service_account
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from google.type import expr_pb2  # type: ignore
-import google.auth
-
-
-CRED_INFO_JSON = {
-    "credential_source": "/path/to/file",
-    "credential_type": "service account credentials",
-    "principal": "service-account@example.com",
-}
-CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
-
-
-async def mock_async_gen(data, chunk_size=1):
-    # Step by chunk_size so successive chunks do not overlap.
-    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
-        chunk = data[i : i + chunk_size]
-        yield chunk.encode("utf-8")
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
-# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
-def async_anonymous_credentials():
-    if HAS_GOOGLE_AUTH_AIO:
-        return ga_credentials_async.AnonymousCredentials()
-    return ga_credentials.AnonymousCredentials()
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert InstanceAdminClient._get_default_mtls_endpoint(None) is None - assert InstanceAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert InstanceAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert InstanceAdminClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - InstanceAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert InstanceAdminClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert InstanceAdminClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - InstanceAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert InstanceAdminClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert InstanceAdminClient._get_client_cert_source(None, False) is None - assert 
InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert InstanceAdminClient._get_client_cert_source(None, True) is mock_default_cert_source - assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) -@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = InstanceAdminClient._DEFAULT_UNIVERSE - default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert InstanceAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT - assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT - assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT - assert InstanceAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert InstanceAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert InstanceAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert InstanceAdminClient._get_universe_domain(None, None) == InstanceAdminClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - InstanceAdminClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
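The `test__get_universe_domain` case above encodes a precedence order: an explicitly configured universe domain wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the googleapis.com default, with an empty string rejected. A minimal standalone sketch of that order (this mirrors the asserted behavior, not the client's internal implementation):

def resolve_universe_domain(client_option, env_value, default="googleapis.com"):
    # The client option takes precedence; an empty string is rejected outright.
    if client_option is not None:
        if client_option == "":
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_option
    # Fall back to the environment variable, then to the default universe.
    return env_value if env_value is not None else default

assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_universe_domain(None, None) == "googleapis.com"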
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = InstanceAdminClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = InstanceAdminClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (InstanceAdminClient, "grpc"), - (InstanceAdminAsyncClient, "grpc_asyncio"), - (InstanceAdminClient, "rest"), -]) -def test_instance_admin_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.InstanceAdminGrpcTransport, "grpc"), - (transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.InstanceAdminRestTransport, "rest"), -]) -def test_instance_admin_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (InstanceAdminClient, "grpc"), - (InstanceAdminAsyncClient, "grpc_asyncio"), - (InstanceAdminClient, "rest"), -]) -def test_instance_admin_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert 
client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -def test_instance_admin_client_get_transport_class(): - transport = InstanceAdminClient.get_transport_class() - available_transports = [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminRestTransport, - ] - assert transport in available_transports - - transport = InstanceAdminClient.get_transport_class("grpc") - assert transport == transports.InstanceAdminGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"), -]) -@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) -@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) -def test_instance_admin_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "true"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "false"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "true"), - (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "false"), -]) -@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) -@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) 
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_instance_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - InstanceAdminClient, InstanceAdminAsyncClient -]) -@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient)) -@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminAsyncClient)) -def test_instance_admin_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - InstanceAdminClient, InstanceAdminAsyncClient -]) -@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) -@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) -def test_instance_admin_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = InstanceAdminClient._DEFAULT_UNIVERSE - default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"), -]) -def test_instance_admin_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", None), -]) -def test_instance_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_instance_admin_client_client_options_from_dict(): - with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = InstanceAdminClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_instance_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=None, - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstanceConfigsRequest, - dict, -]) -def test_list_instance_configs(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstanceConfigsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_instance_configs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstanceConfigsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_instance_configs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_instance_configs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc - request = {} - client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_instance_configs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_instance_configs] = mock_rpc - - request = {} - await client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_instance_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_configs_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigsRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstanceConfigsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_instance_configs_async_from_dict(): - await test_list_instance_configs_async(request_type=dict) - -def test_list_instance_configs_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.ListInstanceConfigsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_instance_configs_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.ListInstanceConfigsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse()) - await client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_instance_configs_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instance_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_instance_configs_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_instance_configs(
-            spanner_instance_admin.ListInstanceConfigsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_instance_configs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_instance_configs(
-            spanner_instance_admin.ListInstanceConfigsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instance_configs_pager(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Set the response to a series of pages.
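-        # Each element of `side_effect` is consumed by one stub invocation:
-        # four pages, then a RuntimeError sentinel that would only surface if
-        # the pager issued an extra, unexpected request.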
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_instance_configs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_instance_admin.InstanceConfig) - for i in results) -def test_list_instance_configs_pages(transport_name: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instance_configs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instance_configs_async_pager(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_configs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_instance_admin.InstanceConfig) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instance_configs_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instance_configs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstanceConfigRequest, - dict, -]) -def test_get_instance_config(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
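-        # Populate every scalar field of the response so the assertions that
-        # follow can verify each one is surfaced unchanged on the client side.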
- call.return_value = spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, - base_config='base_config_value', - etag='etag_value', - leader_options=['leader_options_value'], - reconciling=True, - state=spanner_instance_admin.InstanceConfig.State.CREATING, - free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, - quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, - storage_limit_per_processing_unit=3540, - ) - response = client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.GetInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.InstanceConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED - assert response.base_config == 'base_config_value' - assert response.etag == 'etag_value' - assert response.leader_options == ['leader_options_value'] - assert response.reconciling is True - assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING - assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE - assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION - assert response.storage_limit_per_processing_unit == 3540 - - -def test_get_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.GetInstanceConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceConfigRequest( - name='name_value', - ) - -def test_get_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc - request = {} - client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_instance_config] = mock_rpc - - request = {} - await client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig(
-            name='name_value',
-            display_name='display_name_value',
-            config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED,
-            base_config='base_config_value',
-            etag='etag_value',
-            leader_options=['leader_options_value'],
-            reconciling=True,
-            state=spanner_instance_admin.InstanceConfig.State.CREATING,
-            free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE,
-            quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION,
-            storage_limit_per_processing_unit=3540,
-        ))
-        response = await client.get_instance_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.GetInstanceConfigRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner_instance_admin.InstanceConfig)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED
-    assert response.base_config == 'base_config_value'
-    assert response.etag == 'etag_value'
-    assert response.leader_options == ['leader_options_value']
-    assert response.reconciling is True
-    assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING
-    assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE
-    assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION
-    assert response.storage_limit_per_processing_unit == 3540
-
-
-@pytest.mark.asyncio
-async def test_get_instance_config_async_from_dict():
-    await test_get_instance_config_async(request_type=dict)
-
-def test_get_instance_config_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.GetInstanceConfigRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_config),
-            '__call__') as call:
-        call.return_value = spanner_instance_admin.InstanceConfig()
-        client.get_instance_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_instance_config_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.GetInstanceConfigRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_config),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig())
-        await client.get_instance_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_instance_config_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.InstanceConfig()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_instance_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_instance_config_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_instance_config(
-            spanner_instance_admin.GetInstanceConfigRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_instance_config_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_instance_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_instance_config_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
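-    # The request object and the flattened keywords cannot be merged, so the
-    # client raises ValueError before any RPC is attempted.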
- with pytest.raises(ValueError): - await client.get_instance_config( - spanner_instance_admin.GetInstanceConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.CreateInstanceConfigRequest, - dict, -]) -def test_create_instance_config(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.CreateInstanceConfigRequest( - parent='parent_value', - instance_config_id='instance_config_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstanceConfigRequest( - parent='parent_value', - instance_config_id='instance_config_id_value', - ) - -def test_create_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc - request = {} - client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_instance_config] = mock_rpc - - request = {} - await client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_config_async_from_dict(): - await test_create_instance_config_async(request_type=dict) - -def test_create_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
-    # Set these to a non-empty value.
- request = spanner_instance_admin.CreateInstanceConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_instance_config_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_instance_config_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_instance_config( - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].instance_config - mock_val = spanner_instance_admin.InstanceConfig(name='name_value') - assert arg == mock_val - arg = args[0].instance_config_id - mock_val = 'instance_config_id_value' - assert arg == mock_val - - -def test_create_instance_config_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.create_instance_config(
-            spanner_instance_admin.CreateInstanceConfigRequest(),
-            parent='parent_value',
-            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
-            instance_config_id='instance_config_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_instance_config_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_instance_config(
-            parent='parent_value',
-            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
-            instance_config_id='instance_config_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].instance_config
-        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
-        assert arg == mock_val
-        arg = args[0].instance_config_id
-        mock_val = 'instance_config_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_instance_config_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_instance_config(
-            spanner_instance_admin.CreateInstanceConfigRequest(),
-            parent='parent_value',
-            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
-            instance_config_id='instance_config_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.UpdateInstanceConfigRequest,
-    dict,
-])
-def test_update_instance_config(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.update_instance_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.UpdateInstanceConfigRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
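-    # update_instance_config is a long-running operation, so the client
-    # returns an operation future rather than the updated resource itself.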
- assert isinstance(response, future.Future) - - -def test_update_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.UpdateInstanceConfigRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstanceConfigRequest( - ) - -def test_update_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc - request = {} - client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance_config] = mock_rpc - - request = {} - await client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_config_async_from_dict(): - await test_update_instance_config_async(request_type=dict) - -def test_update_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceConfigRequest() - - request.instance_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance_config.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_config_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceConfigRequest() - - request.instance_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'instance_config.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_instance_config_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_instance_config(
-            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].instance_config
-        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_instance_config_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_instance_config(
-            spanner_instance_admin.UpdateInstanceConfigRequest(),
-            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_instance_config_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_instance_config(
-            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].instance_config
-        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_instance_config_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.update_instance_config( - spanner_instance_admin.UpdateInstanceConfigRequest(), - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.DeleteInstanceConfigRequest, - dict, -]) -def test_delete_instance_config(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.DeleteInstanceConfigRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstanceConfigRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc - request = {} - client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance_config] = mock_rpc - - request = {} - await client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_instance_config_async_from_dict(): - await test_delete_instance_config_async(request_type=dict) - -def test_delete_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - call.return_value = None - client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
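-    # The routing header travels in the call metadata; x-goog-request-params
-    # carries the resource name so the backend can route the request.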
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_instance_config_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.DeleteInstanceConfigRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance_config),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_instance_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_instance_config_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_instance_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_instance_config_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_instance_config(
-            spanner_instance_admin.DeleteInstanceConfigRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_instance_config_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_instance_config(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_instance_config_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.delete_instance_config( - spanner_instance_admin.DeleteInstanceConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstanceConfigOperationsRequest, - dict, -]) -def test_list_instance_config_operations(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstanceConfigOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_instance_config_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstanceConfigOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_instance_config_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstanceConfigOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_instance_config_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_config_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
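-        # Installing the mock directly in _wrapped_methods intercepts the call
-        # without triggering a fresh wrap_method invocation.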
-        client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc
-        request = {}
-        client.list_instance_config_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_instance_config_operations(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_config_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_instance_config_operations in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_instance_config_operations] = mock_rpc
-
-        request = {}
-        await client.list_instance_config_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_instance_config_operations(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_config_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest):
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_config_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_instance_config_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstanceConfigOperationsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_instance_config_operations_async_from_dict():
-    await test_list_instance_config_operations_async(request_type=dict)
-
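The async variants above hand the patched '__call__' a grpc_helpers_async.FakeUnaryUnaryCall so that awaiting the mocked stub behaves like a real in-flight unary-unary RPC. A minimal sketch of the same idea, assuming only the standard library (EchoCall is a hypothetical stand-in, not the api-core class):

import asyncio
from unittest import mock

class EchoCall:
    """Awaitable that resolves to a canned response, like FakeUnaryUnaryCall."""
    def __init__(self, response):
        self._response = response

    def __await__(self):
        yield  # yield to the event loop once, like an in-flight RPC
        return self._response

async def main():
    stub = mock.Mock()
    stub.return_value = EchoCall("canned-response")
    # Awaiting the mock's return value now works just like awaiting a real call.
    assert await stub("request") == "canned-response"

asyncio.run(main())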
-def test_list_instance_config_operations_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_config_operations),
-            '__call__') as call:
-        call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
-        client.list_instance_config_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_config_operations_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_config_operations),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse())
-        await client.list_instance_config_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_instance_config_operations_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_config_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_instance_config_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_instance_config_operations_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_instance_config_operations(
-            spanner_instance_admin.ListInstanceConfigOperationsRequest(),
-            parent='parent_value',
-        )
-
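The flattened tests above pin down the convention that a method accepts either a fully formed request object or individual keyword fields, never both. A minimal sketch of that validation rule, under the assumption that real clients build a proto request rather than a dict (list_configs is a hypothetical name):

def list_configs(request=None, *, parent=None):
    if request is not None and parent is not None:
        # Mirrors the ValueError asserted in the *_flattened_error tests.
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")
    if request is None:
        request = {"parent": parent}  # real clients construct a proto message
    return request

assert list_configs(parent="parent_value") == {"parent": "parent_value"}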
-@pytest.mark.asyncio
-async def test_list_instance_config_operations_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_config_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_instance_config_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_config_operations_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_instance_config_operations(
-            spanner_instance_admin.ListInstanceConfigOperationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instance_config_operations_pager(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_config_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstanceConfigOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstanceConfigOperationsResponse(
-                operations=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstanceConfigOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstanceConfigOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_instance_config_operations(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, operations_pb2.Operation)
-                   for i in results)
-
-
-def test_list_instance_config_operations_pages(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_config_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instance_config_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instance_config_operations_async_pager(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_config_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instance_config_operations_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instance_config_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancesRequest, - dict, -]) -def test_list_instances(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstancesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_instances_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_instances(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_instances_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instances in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc - request = {} - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_instances in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc - - request = {} - await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancesRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_instances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstancesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstancesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_instances_async_from_dict():
-    await test_list_instances_async(request_type=dict)
-
-def test_list_instances_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstancesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instances),
-            '__call__') as call:
-        call.return_value = spanner_instance_admin.ListInstancesResponse()
-        client.list_instances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_instances_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstancesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instances),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
-        await client.list_instances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
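The field-header tests above verify that any request field bound into the HTTP/1.1 URI is mirrored into the x-goog-request-params metadata entry, which is what gapic_v1.routing_header.to_grpc_metadata produces for the client. A rough sketch of how such a header value can be assembled (to_params_value is a hypothetical helper, not the api-core implementation):

from urllib.parse import quote

def to_params_value(params):
    # Percent-encode each value and join key=value pairs with '&'.
    return "&".join(f"{key}={quote(str(value))}" for key, value in params)

metadata = ("x-goog-request-params", to_params_value([("parent", "parent_value")]))
assert metadata == ("x-goog-request-params", "parent=parent_value")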
-def test_list_instances_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instances),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.ListInstancesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_instances(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_instances_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_instances(
-            spanner_instance_admin.ListInstancesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_instances_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instances),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_instances(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instances_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_instances(
-            spanner_instance_admin.ListInstancesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instances_pager(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instances),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[
-                    spanner_instance_admin.Instance(),
-                    spanner_instance_admin.Instance(),
-                    spanner_instance_admin.Instance(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[
-                    spanner_instance_admin.Instance(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[
-                    spanner_instance_admin.Instance(),
-                    spanner_instance_admin.Instance(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_instances(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner_instance_admin.Instance)
-                   for i in results)
-
-
-def test_list_instances_pages(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instances),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[
-                    spanner_instance_admin.Instance(),
-                    spanner_instance_admin.Instance(),
-                    spanner_instance_admin.Instance(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[
-                    spanner_instance_admin.Instance(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstancesResponse(
-                instances=[
-                    spanner_instance_admin.Instance(),
-                    spanner_instance_admin.Instance(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_instances(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_instances_async_pager():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instances),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instances(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_instance_admin.Instance) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instances_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancePartitionsRequest, - dict, -]) -def test_list_instance_partitions(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstancePartitionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancePartitionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_instance_partitions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstancePartitionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_instance_partitions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancePartitionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_instance_partitions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_partitions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc - request = {} - client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_instance_partitions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_instance_partitions in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_instance_partitions] = mock_rpc
-
-        request = {}
-        await client.list_instance_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_instance_partitions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionsRequest):
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partitions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_instance_partitions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstancePartitionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstancePartitionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_async_from_dict():
-    await test_list_instance_partitions_async(request_type=dict)
-
-def test_list_instance_partitions_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstancePartitionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() - client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_instance_partitions_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.ListInstancePartitionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse()) - await client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_instance_partitions_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instance_partitions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_instance_partitions_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_partitions( - spanner_instance_admin.ListInstancePartitionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_instance_partitions_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_instance_partitions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_instance_partitions(
-            spanner_instance_admin.ListInstancePartitionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instance_partitions_pager(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partitions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstancePartitionsResponse(
-                instance_partitions=[
-                    spanner_instance_admin.InstancePartition(),
-                    spanner_instance_admin.InstancePartition(),
-                    spanner_instance_admin.InstancePartition(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstancePartitionsResponse(
-                instance_partitions=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstancePartitionsResponse(
-                instance_partitions=[
-                    spanner_instance_admin.InstancePartition(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstancePartitionsResponse(
-                instance_partitions=[
-                    spanner_instance_admin.InstancePartition(),
-                    spanner_instance_admin.InstancePartition(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_instance_partitions(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner_instance_admin.InstancePartition)
-                   for i in results)
-
-
-def test_list_instance_partitions_pages(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partitions),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instance_partitions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instance_partitions_async_pager(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_partitions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_instance_admin.InstancePartition) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instance_partitions_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instance_partitions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstanceRequest, - dict, -]) -def test_get_instance(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.Instance( - name='name_value', - config='config_value', - display_name='display_name_value', - node_count=1070, - processing_units=1743, - state=spanner_instance_admin.Instance.State.CREATING, - instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, - endpoint_uris=['endpoint_uris_value'], - edition=spanner_instance_admin.Instance.Edition.STANDARD, - default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, - ) - response = client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.GetInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_instance_admin.Instance) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.node_count == 1070 - assert response.processing_units == 1743 - assert response.state == spanner_instance_admin.Instance.State.CREATING - assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED - assert response.endpoint_uris == ['endpoint_uris_value'] - assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD - assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE - - -def test_get_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.GetInstanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceRequest( - name='name_value', - ) - -def test_get_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc - request = {} - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_instance(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_instance in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc
-
-        request = {}
-        await client.get_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_instance(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
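The use_cached_wrapped_rpc tests above assert that gapic_v1.method.wrap_method runs once per RPC at client construction and that later calls hit the _wrapped_methods cache instead of re-wrapping. A compact sketch of that wrap-once-then-look-up pattern (wrap stands in for the api-core wrapper, which layers retry, timeout and metadata handling around the raw stub):

def wrap(fn):
    def wrapped(*args, **kwargs):
        return fn(*args, **kwargs)
    return wrapped

class Transport:
    def __init__(self, rpcs):
        # Wrap every RPC exactly once, at construction time.
        self._wrapped_methods = {fn: wrap(fn) for fn in rpcs}

def get_instance(request):
    return "instance"

transport = Transport([get_instance])
# Later calls look up the cached wrapper instead of wrapping again.
assert transport._wrapped_methods[get_instance]({}) == "instance"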
-@pytest.mark.asyncio
-async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceRequest):
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance(
-            name='name_value',
-            config='config_value',
-            display_name='display_name_value',
-            node_count=1070,
-            processing_units=1743,
-            state=spanner_instance_admin.Instance.State.CREATING,
-            instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED,
-            endpoint_uris=['endpoint_uris_value'],
-            edition=spanner_instance_admin.Instance.Edition.STANDARD,
-            default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE,
-        ))
-        response = await client.get_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.GetInstanceRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner_instance_admin.Instance)
-    assert response.name == 'name_value'
-    assert response.config == 'config_value'
-    assert response.display_name == 'display_name_value'
-    assert response.node_count == 1070
-    assert response.processing_units == 1743
-    assert response.state == spanner_instance_admin.Instance.State.CREATING
-    assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED
-    assert response.endpoint_uris == ['endpoint_uris_value']
-    assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD
-    assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE
-
-
-@pytest.mark.asyncio
-async def test_get_instance_async_from_dict():
-    await test_get_instance_async(request_type=dict)
-
-def test_get_instance_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.GetInstanceRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance),
-            '__call__') as call:
-        call.return_value = spanner_instance_admin.Instance()
-        client.get_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_instance_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.GetInstanceRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance())
-        await client.get_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_instance_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.Instance()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_instance(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_instance_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_instance(
-            spanner_instance_admin.GetInstanceRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_instance_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_instance(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_instance_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_instance(
-            spanner_instance_admin.GetInstanceRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  spanner_instance_admin.CreateInstanceRequest,
-  dict,
-])
-def test_create_instance(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.CreateInstanceRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_instance_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
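# [Editor's note] Sketch of the AIP-4235 behavior this family of tests
# guards: if a request field is annotated as an auto-populated request ID
# and the caller leaves it empty, the client fills it with a UUID4 before
# sending. The field name `request_id` and the helper are illustrative
# assumptions; the real logic is generated into the client from the service
# configuration.
import uuid


def auto_populate_request_id(request: dict) -> dict:
    """Fill an empty request_id with a fresh UUID4, per AIP-4235."""
    if not request.get("request_id"):
        request = {**request, "request_id": str(uuid.uuid4())}
    return request


req = auto_populate_request_id({"parent": "parent_value", "request_id": ""})
assert req["request_id"]  # populated automatically
uuid.UUID(req["request_id"], version=4)  # parses as a valid UUID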
- request = spanner_instance_admin.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', - ) - -def test_create_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc - request = {} - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc - - request = {} - await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
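# [Editor's note] The _wrapped_methods cache these tests exercise boils down
# to memoizing wrap_method(...) output per RPC at transport-construction
# time, so retry/timeout wrapping happens once rather than on every call.
# A minimal sketch of that idea (names here are illustrative, not the GAPIC
# transport code):
from unittest import mock


class TinyTransport:
    def __init__(self, stub, wrap_method):
        self.get_instance = stub
        # Wrap each stub exactly once, up front.
        self._wrapped_methods = {stub: wrap_method(stub)}


wrapper_fn = mock.Mock(side_effect=lambda fn: fn)
transport = TinyTransport(mock.Mock(name="get_instance"), wrapper_fn)
assert wrapper_fn.call_count == 1

# Later calls go through the cache; no new wrapper is created.
transport._wrapped_methods[transport.get_instance]({})
transport._wrapped_methods[transport.get_instance]({})
assert wrapper_fn.call_count == 1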
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_async_from_dict(): - await test_create_instance_async(request_type=dict) - -def test_create_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
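# [Editor's note] The create/update tests above assert that the client wraps
# the stub's operations_pb2.Operation in a google.api_core future. Sketch of
# how a caller consumes that surface (resource names are placeholder values;
# actually running this requires a real project and credentials):
from google.cloud import spanner_admin_instance_v1


def create_instance_example():
    client = spanner_admin_instance_v1.InstanceAdminClient()
    operation = client.create_instance(
        parent="projects/my-project",
        instance_id="my-instance",
        instance=spanner_admin_instance_v1.Instance(
            config="projects/my-project/instanceConfigs/regional-us-central1",
            display_name="My Instance",
            node_count=1,
        ),
    )
    # The long-running operation resolves to the created Instance.
    return operation.result(timeout=300)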
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_instance_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_instance(
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=spanner_instance_admin.Instance(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].instance_id
-        mock_val = 'instance_id_value'
-        assert arg == mock_val
-        arg = args[0].instance
-        mock_val = spanner_instance_admin.Instance(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_instance_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_instance(
-            spanner_instance_admin.CreateInstanceRequest(),
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=spanner_instance_admin.Instance(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_instance_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_instance(
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=spanner_instance_admin.Instance(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].instance_id
-        mock_val = 'instance_id_value'
-        assert arg == mock_val
-        arg = args[0].instance
-        mock_val = spanner_instance_admin.Instance(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_instance_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
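# [Editor's note] Sketch of the calling-convention rule asserted by the
# flattened_error tests: pass either a request object OR the flattened
# keyword fields, never both. Client construction below uses anonymous
# credentials purely for illustration:
import google.auth.credentials
from google.cloud import spanner_admin_instance_v1
from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin

client = spanner_admin_instance_v1.InstanceAdminClient(
    credentials=google.auth.credentials.AnonymousCredentials(),
)
request = spanner_instance_admin.GetInstanceRequest(name="name_value")
# OK: request object only   -> client.get_instance(request=request)
# OK: flattened field only  -> client.get_instance(name="name_value")
# ValueError: both at once  -> client.get_instance(request, name="name_value")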
- with pytest.raises(ValueError): - await client.create_instance( - spanner_instance_admin.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.UpdateInstanceRequest, - dict, -]) -def test_update_instance(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.UpdateInstanceRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstanceRequest( - ) - -def test_update_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc - request = {} - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc - - request = {} - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_async_from_dict(): - await test_update_instance_async(request_type=dict) - -def test_update_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceRequest() - - request.instance.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceRequest() - - request.instance.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] - - -def test_update_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance( - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = spanner_instance_admin.Instance(name='name_value') - assert arg == mock_val - arg = args[0].field_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance( - spanner_instance_admin.UpdateInstanceRequest(), - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_instance(
-            instance=spanner_instance_admin.Instance(name='name_value'),
-            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].instance
-        mock_val = spanner_instance_admin.Instance(name='name_value')
-        assert arg == mock_val
-        arg = args[0].field_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_instance_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_instance(
-            spanner_instance_admin.UpdateInstanceRequest(),
-            instance=spanner_instance_admin.Instance(name='name_value'),
-            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  spanner_instance_admin.DeleteInstanceRequest,
-  dict,
-])
-def test_delete_instance(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.DeleteInstanceRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_instance_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_instance_admin.DeleteInstanceRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstanceRequest( - name='name_value', - ) - -def test_delete_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc - request = {} - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc - - request = {} - await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_instance_async_from_dict(): - await test_delete_instance_async(request_type=dict) - -def test_delete_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = None - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.delete_instance(
-            spanner_instance_admin.DeleteInstanceRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_instance_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_instance(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_instance_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_instance(
-            spanner_instance_admin.DeleteInstanceRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  iam_policy_pb2.SetIamPolicyRequest,
-  dict,
-])
-def test_set_iam_policy(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        )
-        response = client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.SetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-def test_set_iam_policy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = iam_policy_pb2.SetIamPolicyRequest(
-        resource='resource_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.set_iam_policy(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
-            resource='resource_value',
-        )
-
-def test_set_iam_policy_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = InstanceAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.set_iam_policy in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
-        request = {}
-        client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.set_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
-
-        request = {}
-        await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.set_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        ))
-        response = await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
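# [Editor's note] Sketch of the dict-coercion behavior that
# test_set_iam_policy_from_dict_foreign exercises above: a plain dict
# request, including "foreign" well-known protos, is converted into the
# iam_policy_pb2.SetIamPolicyRequest the stub expects. The construction the
# client performs is equivalent to:
from google.iam.v1 import iam_policy_pb2, policy_pb2
from google.protobuf import field_mask_pb2

request = iam_policy_pb2.SetIamPolicyRequest(
    resource="resource_value",
    policy=policy_pb2.Policy(version=774),
    update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
)
assert request.resource == "resource_value"
assert request.policy.version == 774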
-        client.set_iam_policy(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-
-def test_set_iam_policy_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.set_iam_policy(
-            iam_policy_pb2.SetIamPolicyRequest(),
-            resource='resource_value',
-        )
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.set_iam_policy(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.set_iam_policy(
-            iam_policy_pb2.SetIamPolicyRequest(),
-            resource='resource_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  iam_policy_pb2.GetIamPolicyRequest,
-  dict,
-])
-def test_get_iam_policy(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        )
-        response = client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.GetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-def test_get_iam_policy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc - - request = {} - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.get_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        ))
-        response = await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.GetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
-    await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        call.return_value = policy_pb2.Policy()
-        client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-def test_get_iam_policy_from_dict_foreign():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-        response = client.get_iam_policy(request={
-            'resource': 'resource_value',
-            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
-            }
-        )
-        call.assert_called()
-
-
-def test_get_iam_policy_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_iam_policy(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-
-def test_get_iam_policy_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_iam_policy(
-            iam_policy_pb2.GetIamPolicyRequest(),
-            resource='resource_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_iam_policy(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc - - request = {} - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - -def test_test_iam_permissions_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request.
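- # Patching the stub's __call__ keeps the test offline; only the metadata - # the client attaches (the x-goog-request-params routing header) is checked.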
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - arg = args[0].permissions - mock_val = ['permissions_value'] - assert arg == mock_val - - -def test_test_iam_permissions_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - arg = args[0].permissions - mock_val = ['permissions_value'] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_test_iam_permissions_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstancePartitionRequest, - dict, -]) -def test_get_instance_partition(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.InstancePartition( - name='name_value', - config='config_value', - display_name='display_name_value', - state=spanner_instance_admin.InstancePartition.State.CREATING, - referencing_databases=['referencing_databases_value'], - referencing_backups=['referencing_backups_value'], - etag='etag_value', - node_count=1070, - ) - response = client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.GetInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
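- # (node_count is set on the mock but not re-asserted below, presumably - # because it belongs to the InstancePartition compute-capacity oneof.)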
- assert isinstance(response, spanner_instance_admin.InstancePartition) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.state == spanner_instance_admin.InstancePartition.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.referencing_backups == ['referencing_backups_value'] - assert response.etag == 'etag_value' - - -def test_get_instance_partition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.GetInstancePartitionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance_partition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstancePartitionRequest( - name='name_value', - ) - -def test_get_instance_partition_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc - request = {} - client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_instance_partition in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_instance_partition] = mock_rpc - - request = {} - await client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstancePartitionRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition( - name='name_value', - config='config_value', - display_name='display_name_value', - state=spanner_instance_admin.InstancePartition.State.CREATING, - referencing_databases=['referencing_databases_value'], - referencing_backups=['referencing_backups_value'], - etag='etag_value', - )) - response = await client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.GetInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
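- # Awaiting the FakeUnaryUnaryCall resolves to the InstancePartition payload, - # so the same field-by-field checks as the sync variant apply below.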
- assert isinstance(response, spanner_instance_admin.InstancePartition) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.state == spanner_instance_admin.InstancePartition.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.referencing_backups == ['referencing_backups_value'] - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_instance_partition_async_from_dict(): - await test_get_instance_partition_async(request_type=dict) - -def test_get_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - call.return_value = spanner_instance_admin.InstancePartition() - client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition()) - await client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_instance_partition_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.InstancePartition() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_instance_partition_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance_partition( - spanner_instance_admin.GetInstancePartitionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_instance_partition_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_instance_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_instance_partition_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_instance_partition( - spanner_instance_admin.GetInstancePartitionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.CreateInstancePartitionRequest, - dict, -]) -def test_create_instance_partition(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_instance_partition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests.
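- # If the generator marks any request field as an auto-populated request ID, - # it should be filled in even when other fields are set explicitly.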
- client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.CreateInstancePartitionRequest( - parent='parent_value', - instance_partition_id='instance_partition_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_instance_partition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstancePartitionRequest( - parent='parent_value', - instance_partition_id='instance_partition_id_value', - ) - -def test_create_instance_partition_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc - request = {} - client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_instance_partition in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_instance_partition] = mock_rpc - - request = {} - await client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstancePartitionRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_partition_async_from_dict(): - await test_create_instance_partition_async(request_type=dict) - -def test_create_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstancePartitionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstancePartitionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_instance_partition_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_instance_partition( - parent='parent_value', - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - instance_partition_id='instance_partition_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].instance_partition - mock_val = spanner_instance_admin.InstancePartition(name='name_value') - assert arg == mock_val - arg = args[0].instance_partition_id - mock_val = 'instance_partition_id_value' - assert arg == mock_val - - -def test_create_instance_partition_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_instance_partition( - spanner_instance_admin.CreateInstancePartitionRequest(), - parent='parent_value', - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - instance_partition_id='instance_partition_id_value', - ) - -@pytest.mark.asyncio -async def test_create_instance_partition_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_instance_partition( - parent='parent_value', - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - instance_partition_id='instance_partition_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].instance_partition - mock_val = spanner_instance_admin.InstancePartition(name='name_value') - assert arg == mock_val - arg = args[0].instance_partition_id - mock_val = 'instance_partition_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_instance_partition_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_instance_partition( - spanner_instance_admin.CreateInstancePartitionRequest(), - parent='parent_value', - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - instance_partition_id='instance_partition_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.DeleteInstancePartitionRequest, - dict, -]) -def test_delete_instance_partition(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_instance_partition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.DeleteInstancePartitionRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_instance_partition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstancePartitionRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_instance_partition_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc - request = {} - client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_instance_partition in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance_partition] = mock_rpc - - request = {} - await client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstancePartitionRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. 
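- # FakeUnaryUnaryCall(None) yields an awaitable that resolves to None, - # mirroring the Empty response of the delete RPC.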
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_instance_partition_async_from_dict(): - await test_delete_instance_partition_async(request_type=dict) - -def test_delete_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - call.return_value = None - client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_instance_partition_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_instance_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_instance_partition_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance_partition( - spanner_instance_admin.DeleteInstancePartitionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_instance_partition_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_instance_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_instance_partition_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_instance_partition( - spanner_instance_admin.DeleteInstancePartitionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.UpdateInstancePartitionRequest, - dict, -]) -def test_update_instance_partition(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_partition_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests.
- client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.UpdateInstancePartitionRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance_partition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstancePartitionRequest( - ) - -def test_update_instance_partition_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc - request = {} - client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_instance_partition in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance_partition] = mock_rpc - - request = {} - await client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
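- # (Resetting below discards the wrap_method calls made while that - # operations client was lazily constructed.)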
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstancePartitionRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_partition_async_from_dict(): - await test_update_instance_partition_async(request_type=dict) - -def test_update_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstancePartitionRequest() - - request.instance_partition.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance_partition.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstancePartitionRequest() - - request.instance_partition.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance_partition.name=name_value', - ) in kw['metadata'] - - -def test_update_instance_partition_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance_partition( - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance_partition - mock_val = spanner_instance_admin.InstancePartition(name='name_value') - assert arg == mock_val - arg = args[0].field_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_instance_partition_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance_partition( - spanner_instance_admin.UpdateInstancePartitionRequest(), - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_instance_partition_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_instance_partition( - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].instance_partition - mock_val = spanner_instance_admin.InstancePartition(name='name_value') - assert arg == mock_val - arg = args[0].field_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_instance_partition_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
- with pytest.raises(ValueError): - await client.update_instance_partition( - spanner_instance_admin.UpdateInstancePartitionRequest(), - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancePartitionOperationsRequest, - dict, -]) -def test_list_instance_partition_operations(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse( - next_page_token='next_page_token_value', - unreachable_instance_partitions=['unreachable_instance_partitions_value'], - ) - response = client.list_instance_partition_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstancePartitionOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancePartitionOperationsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value'] - - -def test_list_instance_partition_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstancePartitionOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_instance_partition_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancePartitionOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_instance_partition_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc - request = {} - client.list_instance_partition_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_partition_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_partition_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_instance_partition_operations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_instance_partition_operations] = mock_rpc - - request = {} - await client.list_instance_partition_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_instance_partition_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_partition_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
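The caching contract the `use_cached_wrapped_rpc` tests assert reduces to: wrap each transport method once at client construction (layering retry/timeout/metadata handling), key the cache by the unwrapped callable, and reuse the wrapper on every call. A minimal sketch under those assumptions; `SketchTransport` and `wrap_method` are illustrative, not the real transport classes:

from typing import Callable, Dict

def wrap_method(func: Callable) -> Callable:
    # Real wrappers add retry, timeout and metadata handling around func.
    def wrapped(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapped

class SketchTransport:
    def __init__(self):
        self.list_ops: Callable = lambda request: ["op-1", "op-2"]
        # Built once at construction, keyed by the unwrapped callable --
        # the analogue of client._transport._wrapped_methods above.
        self._wrapped_methods: Dict[Callable, Callable] = {
            self.list_ops: wrap_method(self.list_ops),
        }

transport = SketchTransport()
assert transport._wrapped_methods[transport.list_ops]({}) == ["op-1", "op-2"]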
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
-        ))
-        response = await client.list_instance_partition_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstancePartitionOperationsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_async_from_dict():
-    await test_list_instance_partition_operations_async(request_type=dict)
-
-def test_list_instance_partition_operations_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
-        client.list_instance_partition_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
-        await client.list_instance_partition_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_instance_partition_operations_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
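The `('x-goog-request-params', ...)` pairs asserted in the field-header tests are produced by `google.api_core`'s routing-header helper, the same one used by the pager tests further down: it URL-encodes the routed fields into a single metadata entry. In isolation:

from google.api_core.gapic_v1 import routing_header

pair = routing_header.to_grpc_metadata([("parent", "parent_value")])
assert pair == ("x-goog-request-params", "parent=parent_value")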
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_instance_partition_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_instance_partition_operations_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_instance_partition_operations(
-            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_instance_partition_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_instance_partition_operations(
-            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instance_partition_operations_pager(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_instance_partition_operations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) -def test_list_instance_partition_operations_pages(transport_name: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instance_partition_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instance_partition_operations_async_pager(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
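The pager and pages tests above pin down a small iteration contract: iterating the pager yields items across page boundaries, and `.pages` stops once a page carries an empty `next_page_token`. A self-contained sketch of that contract; `FakePage`/`FakePager` are illustrative stand-ins, not the generated `pagers` classes:

class FakePage:
    def __init__(self, items, next_page_token):
        self.items = items
        self.next_page_token = next_page_token

class FakePager:
    def __init__(self, pages):
        self._pages = pages

    @property
    def pages(self):
        # Yield pages until one carries an empty token.
        for page in self._pages:
            yield page
            if not page.next_page_token:
                return

    def __iter__(self):
        for page in self.pages:
            yield from page.items

pager = FakePager([FakePage([1, 2, 3], 'abc'), FakePage([], 'def'), FakePage([4], '')])
assert list(pager) == [1, 2, 3, 4]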
- call.side_effect = ( - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_partition_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instance_partition_operations_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instance_partition_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.MoveInstanceRequest, - dict, -]) -def test_move_instance(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.MoveInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_move_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.MoveInstanceRequest( - name='name_value', - target_config='target_config_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.move_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.MoveInstanceRequest( - name='name_value', - target_config='target_config_value', - ) - -def test_move_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.move_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc - request = {} - client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.move_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_move_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.move_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.move_instance] = mock_rpc - - request = {} - await client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.move_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_move_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.MoveInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.MoveInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_move_instance_async_from_dict(): - await test_move_instance_async(request_type=dict) - -def test_move_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.MoveInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_move_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.MoveInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_list_instance_configs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc - - request = {} - client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_instance_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_configs_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
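The "verify fields with default values are dropped" step above leans on proto3 JSON semantics: `json_format.MessageToJson` omits fields still at their default value, so only caller-set fields survive into `jsonified_request`. A quick demonstration with a message type these tests already import:

import json
from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation()  # every field at its default value
assert json.loads(json_format.MessageToJson(op)) == {}

op.name = 'operations/spam'      # non-default values are serialized
assert json.loads(json_format.MessageToJson(op)) == {'name': 'operations/spam'}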
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instance_configs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instance_configs_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instance_configs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instance_configs_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instance_configs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1]) - - -def test_list_instance_configs_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_configs( - spanner_instance_admin.ListInstanceConfigsRequest(), - parent='parent_value', - ) - - -def test_list_instance_configs_rest_pager(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
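The mocked REST responses above all follow one round-trip recipe: serialize the proto response to JSON for the fake HTTP body, which the transport then parses back into the response type. In isolation:

from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name='operations/spam')
body = json_format.MessageToJson(op)                          # fake wire payload
parsed = json_format.Parse(body, operations_pb2.Operation())  # transport-side parse
assert parsed.name == 'operations/spam'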
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(spanner_instance_admin.ListInstanceConfigsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_instance_configs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_instance_admin.InstanceConfig) - for i in results) - - pages = list(client.list_instance_configs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc - - request = {} - client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_instance_config_rest_required_fields(request_type=spanner_instance_admin.GetInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstanceConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstanceConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) - - -def test_get_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance_config( - spanner_instance_admin.GetInstanceConfigRequest(), - name='name_value', - ) - - -def test_create_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc - - request = {} - client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_instance_config_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_config_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceConfigId"] = 'instance_config_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "instanceConfigId" in jsonified_request - assert jsonified_request["instanceConfigId"] == 'instance_config_id_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "instanceConfigId", "instanceConfig", ))) - - -def test_create_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1]) - - -def test_create_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_instance_config( - spanner_instance_admin.CreateInstanceConfigRequest(), - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - - -def test_update_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc - - request = {} - client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_instance_config_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("instanceConfig", "updateMask", ))) - - -def test_update_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}} - - # get truthy value for each flattened field - mock_args = dict( - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance_config.name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) - - -def test_update_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_instance_config( - spanner_instance_admin.UpdateInstanceConfigRequest(), - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc - - request = {} - client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_instance_config_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) - - -def test_delete_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) - - -def test_delete_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance_config( - spanner_instance_admin.DeleteInstanceConfigRequest(), - name='name_value', - ) - - -def test_list_instance_config_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_config_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
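`path_template.validate`, used by the flattened REST tests above, checks a concrete request URL against the RPC's URI template; the host prefix below is illustrative:

from google.api_core import path_template

assert path_template.validate(
    "https://spanner.googleapis.com/v1/{name=projects/*/instanceConfigs/*}",
    "https://spanner.googleapis.com/v1/projects/sample1/instanceConfigs/sample2",
)
assert not path_template.validate(
    "https://spanner.googleapis.com/v1/{name=projects/*/instanceConfigs/*}",
    "https://spanner.googleapis.com/v1/projects/sample1/instances/sample2",
)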
- client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc - - request = {} - client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_config_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_config_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instance_config_operations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instance_config_operations_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instance_config_operations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instance_config_operations_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instance_config_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigOperations" % client.transport._host, args[1]) - - -def test_list_instance_config_operations_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_config_operations( - spanner_instance_admin.ListInstanceConfigOperationsRequest(), - parent='parent_value', - ) - - -def test_list_instance_config_operations_rest_pager(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
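The *_rest_pager tests that follow all verify the same stitching behaviour: the pager keeps issuing requests while next_page_token is non-empty and flattens the per-page items into a single iterable. A schematic version of that loop, with fetch_page as a hypothetical stand-in for one list HTTP round trip:

def iterate_all(fetch_page):
    """Yield items from every page until an empty next_page_token."""
    token = ""
    while True:
        page = fetch_page(token)
        yield from page["items"]
        token = page.get("next_page_token", "")
        if not token:
            break


# Four pages holding 3 + 0 + 1 + 2 items, mirroring the test fixtures.
pages = {
    "": {"items": [1, 2, 3], "next_page_token": "abc"},
    "abc": {"items": [], "next_page_token": "def"},
    "def": {"items": [4], "next_page_token": "ghi"},
    "ghi": {"items": [5, 6]},
}
assert list(iterate_all(lambda token: pages[token])) == [1, 2, 3, 4, 5, 6]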
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1'}
-
- pager = client.list_instance_config_operations(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, operations_pb2.Operation)
- for i in results)
-
- pages = list(client.list_instance_config_operations(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_list_instances_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_instances in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc
-
- request = {}
- client.list_instances(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instances_rest_required_fields(request_type=spanner_instance_admin.ListInstancesRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "instance_deadline", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
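The two assertions around _get_unset_required_fields reduce to set algebra: fields reported as legitimately unset must come from the method's optional query parameters, and the expected value in the *_unset_required_fields test is the intersection of those parameters with the required fields, which is empty for list_instances. Sketched with plain sets, using the wire-format (camelCase) names:

optional_query_params = {"filter", "instanceDeadline", "pageSize", "pageToken"}
required_fields = {"parent"}

# No optional query parameter doubles as a required field...
assert optional_query_params & required_fields == set()
# ...and anything reported unset must belong to the optional set.
unset_fields = {"pageSize"}  # illustrative value
assert not unset_fields - optional_query_params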
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_instances(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_instances_rest_unset_required_fields():
- transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.list_instances._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "instanceDeadline", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_instances_rest_flattened():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = spanner_instance_admin.ListInstancesResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_instances(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1])
-
-
-def test_list_instances_rest_flattened_error(transport: str = 'rest'):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_instances(
- spanner_instance_admin.ListInstancesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_instances_rest_pager(transport: str = 'rest'):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_instance_admin.ListInstancesResponse(
- instances=[
- spanner_instance_admin.Instance(),
- spanner_instance_admin.Instance(),
- spanner_instance_admin.Instance(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancesResponse(
- instances=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancesResponse(
- instances=[
- spanner_instance_admin.Instance(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancesResponse(
- instances=[
- spanner_instance_admin.Instance(),
- spanner_instance_admin.Instance(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_instance_admin.ListInstancesResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1'}
-
- pager = client.list_instances(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, spanner_instance_admin.Instance)
- for i in results)
-
- pages = list(client.list_instances(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_list_instance_partitions_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_instance_partitions in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc
-
- request = {}
- client.list_instance_partitions(request)
-
- # Establish that the underlying gRPC stub method was called.
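Every client in these tests is built with AnonymousCredentials, so no application-default-credentials lookup or token refresh can leak into a unit test; the credentials object is inert by design:

from google.auth import credentials as ga_credentials

creds = ga_credentials.AnonymousCredentials()
assert creds.valid          # always valid...
assert not creds.expired    # ...and never expires or refreshes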
- assert mock_rpc.call_count == 1 - - client.list_instance_partitions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_partitions_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("instance_partition_deadline", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instance_partitions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instance_partitions_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instance_partitions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instance_partitions_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instance_partitions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1]) - - -def test_list_instance_partitions_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_partitions( - spanner_instance_admin.ListInstancePartitionsRequest(), - parent='parent_value', - ) - - -def test_list_instance_partitions_rest_pager(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
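The *_flattened_error tests pin down a client-wide rule: a method may be called with a request object or with flattened keyword fields, but never both. A minimal sketch of the guard (demo_method is hypothetical; the generated clients implement the same check internally):

import pytest


def demo_method(request=None, *, parent=None):
    has_flattened = parent is not None
    if request is not None and has_flattened:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )
    return request if request is not None else {"parent": parent}


assert demo_method(parent="projects/sample1") == {"parent": "projects/sample1"}
with pytest.raises(ValueError):
    demo_method({"parent": "projects/sample1"}, parent="projects/sample1")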
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_instance_admin.ListInstancePartitionsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
- pager = client.list_instance_partitions(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, spanner_instance_admin.InstancePartition)
- for i in results)
-
- pages = list(client.list_instance_partitions(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_get_instance_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_instance in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc
-
- request = {}
- client.get_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.get_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_instance_rest_required_fields(request_type=spanner_instance_admin.GetInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("field_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.Instance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("fieldMask", )) & set(("name", ))) - - -def test_get_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
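The response fakes above work because requests.Response derives .text and .json() lazily from the private _content bytes, so setting _content and status_code is all a unit test needs; a quick self-contained check of that assumption:

from requests import Response

response_value = Response()
response_value.status_code = 200
response_value._content = b'{"name": "projects/sample1/instances/sample2"}'
assert response_value.json() == {"name": "projects/sample1/instances/sample2"}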
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.Instance() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1]) - - -def test_get_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance( - spanner_instance_admin.GetInstanceRequest(), - name='name_value', - ) - - -def test_create_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc - - request = {} - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_instance_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "instanceId", "instance", ))) - - -def test_create_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1]) - - -def test_create_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
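path_template.validate, used above to check the final request URL, is the inverse of template expansion; a round trip through google.api_core.path_template makes the relationship concrete (the template string mirrors the create_instance HTTP rule):

from google.api_core import path_template

template = "v1/{parent=projects/*}/instances"
uri = path_template.expand(template, parent="projects/sample1")
assert uri == "v1/projects/sample1/instances"
assert path_template.validate(template, uri)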
- with pytest.raises(ValueError): - client.create_instance( - spanner_instance_admin.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - -def test_update_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc - - request = {} - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_instance_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("instance", "fieldMask", ))) - - -def test_update_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/instances/sample2'}} - - # get truthy value for each flattened field - mock_args = dict( - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance.name=projects/*/instances/*}" % client.transport._host, args[1]) - - -def test_update_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
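In the flattened update_instance call above, 'paths_value' is only a truthy placeholder to exercise the plumbing; a real update mask names the Instance fields being changed, for example (the field names here are illustrative):

from google.protobuf import field_mask_pb2

update_mask = field_mask_pb2.FieldMask(paths=["display_name", "node_count"])
assert list(update_mask.paths) == ["display_name", "node_count"]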
- with pytest.raises(ValueError): - client.update_instance( - spanner_instance_admin.UpdateInstanceRequest(), - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc - - request = {} - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_instance_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1]) - - -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance( - spanner_instance_admin.DeleteInstanceRequest(), - name='name_value', - ) - - -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
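Unlike the instance-admin RPCs, the IAM methods below reuse the handwritten google.iam.v1 protos, so requests are built directly from iam_policy_pb2 and policy_pb2; a minimal SetIamPolicyRequest (resource and binding values are illustrative) looks like:

from google.iam.v1 import iam_policy_pb2, policy_pb2

request = iam_policy_pb2.SetIamPolicyRequest(
    resource="projects/sample1/instances/sample2",
    policy=policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/spanner.admin",
                members=["user:admin@example.com"],
            )
        ]
    ),
)
assert request.policy.bindings[0].role == "roles/spanner.admin"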
- client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_set_iam_policy_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) - - -def test_set_iam_policy_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.set_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:setIamPolicy" % client.transport._host, args[1]) - - -def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_iam_policy_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", ))) - - -def test_get_iam_policy_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:getIamPolicy" % client.transport._host, args[1]) - - -def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - jsonified_request["permissions"] = 'permissions_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == 'permissions_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
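-            # The stubbed transcode result below mirrors a POST binding: uri,
-            # method, query params, and the request message doubling as the body.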
-            pb_request = request
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.test_iam_permissions(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_test_iam_permissions_rest_unset_required_fields():
-    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.test_iam_permissions._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("resource", "permissions", )))
-
-
-def test_test_iam_permissions_rest_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = iam_policy_pb2.TestIamPermissionsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'resource': 'projects/sample1/instances/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            resource='resource_value',
-            permissions=['permissions_value'],
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.test_iam_permissions(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:testIamPermissions" % client.transport._host, args[1])
-
-
-def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
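-    # No HTTP mock is needed here: the ValueError is raised client-side
-    # before any request would be sent.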
- with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_get_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc - - request = {} - client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_instance_partition_rest_required_fields(request_type=spanner_instance_admin.GetInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstancePartition() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
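-            # GetInstancePartitionRequest is a proto-plus message, so it is
-            # converted to its raw protobuf form before being handed to the
-            # transcode stub.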
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = spanner_instance_admin.InstancePartition.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_instance_partition(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_instance_partition_rest_unset_required_fields():
-    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_instance_partition._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_instance_partition_rest_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner_instance_admin.InstancePartition()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = spanner_instance_admin.InstancePartition.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_instance_partition(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1])
-
-
-def test_get_instance_partition_rest_flattened_error(transport: str = 'rest'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.get_instance_partition( - spanner_instance_admin.GetInstancePartitionRequest(), - name='name_value', - ) - - -def test_create_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc - - request = {} - client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_instance_partition_rest_required_fields(request_type=spanner_instance_admin.CreateInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_partition_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["instancePartitionId"] = 'instance_partition_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "instancePartitionId" in jsonified_request - assert jsonified_request["instancePartitionId"] == 'instance_partition_id_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
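-        # create_instance_partition is a long-running operation, so the
-        # Operation designated above is what the mocked HTTP layer hands back.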
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_instance_partition(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_instance_partition_rest_unset_required_fields():
-    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_instance_partition._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "instancePartitionId", "instancePartition", )))
-
-
-def test_create_instance_partition_rest_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            instance_partition_id='instance_partition_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_instance_partition(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1])
-
-
-def test_create_instance_partition_rest_flattened_error(transport: str = 'rest'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.create_instance_partition( - spanner_instance_admin.CreateInstancePartitionRequest(), - parent='parent_value', - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - instance_partition_id='instance_partition_id_value', - ) - - -def test_delete_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc - - request = {} - client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_instance_partition_rest_required_fields(request_type=spanner_instance_admin.DeleteInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
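-            # DeleteInstancePartition maps to HTTP DELETE, so the transcode
-            # stub below carries no 'body' and the faked response payload is
-            # an empty string.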
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_instance_partition(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_instance_partition_rest_unset_required_fields():
-    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_instance_partition._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("etag", )) & set(("name", )))
-
-
-def test_delete_instance_partition_rest_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_instance_partition(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1])
-
-
-def test_delete_instance_partition_rest_flattened_error(transport: str = 'rest'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.delete_instance_partition( - spanner_instance_admin.DeleteInstancePartitionRequest(), - name='name_value', - ) - - -def test_update_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc - - request = {} - client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_instance_partition_rest_required_fields(request_type=spanner_instance_admin.UpdateInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
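-            # UpdateInstancePartition maps to HTTP PATCH; the request message
-            # doubles as the body in the transcode stub below.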
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_instance_partition(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_instance_partition_rest_unset_required_fields():
-    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_instance_partition._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("instancePartition", "fieldMask", )))
-
-
-def test_update_instance_partition_rest_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_instance_partition(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1])
-
-
-def test_update_instance_partition_rest_flattened_error(transport: str = 'rest'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.update_instance_partition( - spanner_instance_admin.UpdateInstancePartitionRequest(), - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_list_instance_partition_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc - - request = {} - client.list_instance_partition_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_partition_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_partition_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "instance_partition_deadline", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
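-        # The paged response is faked as a single page here; pagination is
-        # exercised by test_list_instance_partition_operations_rest_pager below.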
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_instance_partition_operations(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_instance_partition_operations_rest_unset_required_fields():
-    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_instance_partition_operations._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_instance_partition_operations_rest_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_instance_partition_operations(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitionOperations" % client.transport._host, args[1])
-
-
-def test_list_instance_partition_operations_rest_flattened_error(transport: str = 'rest'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_instance_partition_operations(
-            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instance_partition_operations_rest_pager(transport: str = 'rest'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            spanner_instance_admin.ListInstancePartitionOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstancePartitionOperationsResponse(
-                operations=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstancePartitionOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstancePartitionOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
-        pager = client.list_instance_partition_operations(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, operations_pb2.Operation)
-                   for i in results)
-
-        pages = list(client.list_instance_partition_operations(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_move_instance_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = InstanceAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.move_instance in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc
-
-        request = {}
-        client.move_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
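-        # move_instance returns a long-running operation; the first call may
-        # wrap additional helper methods lazily, so the counter is reset again
-        # below before asserting that the second call reuses the cache.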
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.move_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_move_instance_rest_required_fields(request_type=spanner_instance_admin.MoveInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request_init["target_config"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["targetConfig"] = 'target_config_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "targetConfig" in jsonified_request - assert jsonified_request["targetConfig"] == 'target_config_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
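-            # MoveInstance requires both "name" and "target_config"; both were
-            # populated above, so the transcoded request retains them.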
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.move_instance(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_move_instance_rest_unset_required_fields():
-    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.move_instance._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "targetConfig", )))
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.InstanceAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = InstanceAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.InstanceAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = InstanceAdminClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.InstanceAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = InstanceAdminClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = InstanceAdminClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.InstanceAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = InstanceAdminClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.InstanceAdminGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = InstanceAdminClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
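-    # Both the sync and asyncio gRPC transports expose the underlying channel
-    # via their grpc_channel property, as exercised below.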
- transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.InstanceAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - transports.InstanceAdminRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = InstanceAdminClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_configs_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - client.list_instance_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value = spanner_instance_admin.InstanceConfig() - client.get_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_config(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - call.return_value = None - client.delete_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_config_operations_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - client.list_instance_config_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instances_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancesResponse() - client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
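-# Passing request=None makes the client construct the default request message
-# itself, which is what the assertion on args[0] checks.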
-def test_list_instance_partitions_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() - client.list_instance_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = spanner_instance_admin.Instance() - client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = None - client.delete_instance(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - call.return_value = spanner_instance_admin.InstancePartition() - client.get_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - call.return_value = None - client.delete_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_partition_operations_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() - client.list_instance_partition_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_move_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.move_instance(request=None) - - # Establish that the underlying stub method was called. 
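- # (editor's aside, not generator output) The equality check below relies on
- # protobuf message equality: a freshly constructed MoveInstanceRequest() with
- # all defaults compares equal to whatever the client built from request=None,
- # which is exactly the contract these failsafes pin down.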
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.MoveInstanceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = InstanceAdminAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_configs_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - )) - await client.list_instance_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, - base_config='base_config_value', - etag='etag_value', - leader_options=['leader_options_value'], - reconciling=True, - state=spanner_instance_admin.InstanceConfig.State.CREATING, - free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, - quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, - storage_limit_per_processing_unit=3540, - )) - await client.get_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_config_operations_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_instance_config_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
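[Editor's note] The asyncio variants differ from the sync ones in only one mechanical detail: a bare proto is not awaitable, so the stub is primed with `grpc_helpers_async.FakeUnaryUnaryCall`, the same helper the surrounding tests use from `google.api_core`. A trimmed sketch (test name illustrative; `async_anonymous_credentials()` is the helper defined earlier in this module):

    import pytest
    from unittest import mock

    from google.api_core import grpc_helpers_async
    from google.cloud.spanner_admin_instance_v1 import InstanceAdminAsyncClient
    from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin


    @pytest.mark.asyncio
    async def check_async_empty_call_builds_default_request():
        client = InstanceAdminAsyncClient(
            credentials=async_anonymous_credentials(),  # module-local helper
            transport="grpc_asyncio",
        )
        with mock.patch.object(
                type(client.transport.get_instance), '__call__') as call:
            # FakeUnaryUnaryCall wraps the proto in an awaitable that resolves
            # immediately, standing in for a real grpc.aio call object.
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
                spanner_instance_admin.Instance(name='name_value'))
            await client.get_instance(request=None)
            _, args, _ = call.mock_calls[0]
            assert args[0] == spanner_instance_admin.GetInstanceRequest()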
-@pytest.mark.asyncio -async def test_list_instances_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_partitions_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_instance_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance( - name='name_value', - config='config_value', - display_name='display_name_value', - node_count=1070, - processing_units=1743, - state=spanner_instance_admin.Instance.State.CREATING, - instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, - endpoint_uris=['endpoint_uris_value'], - edition=spanner_instance_admin.Instance.Edition.STANDARD, - default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, - )) - await client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
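[Editor's note] For the long-running RPCs that follow (create/update of instances, configs, and partitions, plus move_instance), the stub is primed with a raw `operations_pb2.Operation` rather than a resource proto; the client wraps it in an operation future, which the tests never poll. Inside the same kind of `mock.patch.object(...) as call` block, that priming looks roughly like this (fragment, under the same assumptions as the sketch above):

    from google.api_core import grpc_helpers_async
    from google.longrunning import operations_pb2

    call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
        operations_pb2.Operation(name='operations/spam'))
    lro = await client.create_instance(request=None)
    # `lro` is an operation future wrapping the raw proto; a real caller would
    # continue with `await lro.result()`, but these failsafes stop at args[0].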
-@pytest.mark.asyncio -async def test_create_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition( - name='name_value', - config='config_value', - display_name='display_name_value', - state=spanner_instance_admin.InstancePartition.State.CREATING, - referencing_databases=['referencing_databases_value'], - referencing_backups=['referencing_backups_value'], - etag='etag_value', - )) - await client.get_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_partition_operations_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse( - next_page_token='next_page_token_value', - unreachable_instance_partitions=['unreachable_instance_partitions_value'], - )) - await client.list_instance_partition_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
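[Editor's note] The `request=None` convention being pinned down here is part of the public GAPIC surface: passing no request must be indistinguishable, at the transport boundary, from passing a default-constructed request proto. For the `move_instance` test below, that means these two calls reach the stub with identical protos (sketch, inside the test's own mocked context):

    await client.move_instance(request=None)
    await client.move_instance(
        request=spanner_instance_admin.MoveInstanceRequest())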
-@pytest.mark.asyncio -async def test_move_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.move_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.MoveInstanceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = InstanceAdminClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_instance_configs_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instance_configs(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstanceConfigsRequest, - dict, -]) -def test_list_instance_configs_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instance_configs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstanceConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instance_configs_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_configs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.ListInstanceConfigsRequest.pb(spanner_instance_admin.ListInstanceConfigsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.ListInstanceConfigsResponse.to_json(spanner_instance_admin.ListInstanceConfigsResponse()) - req.return_value.content = return_value - - request = spanner_instance_admin.ListInstanceConfigsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigsResponse(), metadata - - client.list_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_instance_config_rest_bad_request(request_type=spanner_instance_admin.GetInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
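- # (editor's aside, not generator output) The mocked Session.request below hands
- # back a bare 400 with an empty JSON body; the REST transport translates HTTP
- # status codes into google.api_core.exceptions subclasses, so pytest.raises
- # can expect core_exceptions.BadRequest without any error payload being set.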
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstanceConfigRequest, - dict, -]) -def test_get_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, - base_config='base_config_value', - etag='etag_value', - leader_options=['leader_options_value'], - reconciling=True, - state=spanner_instance_admin.InstanceConfig.State.CREATING, - free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, - quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, - storage_limit_per_processing_unit=3540, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_instance_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_instance_admin.InstanceConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED - assert response.base_config == 'base_config_value' - assert response.etag == 'etag_value' - assert response.leader_options == ['leader_options_value'] - assert response.reconciling is True - assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING - assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE - assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION - assert response.storage_limit_per_processing_unit == 3540 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.GetInstanceConfigRequest.pb(spanner_instance_admin.GetInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.InstanceConfig.to_json(spanner_instance_admin.InstanceConfig()) - req.return_value.content = return_value - - request = spanner_instance_admin.GetInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.InstanceConfig() - post_with_metadata.return_value = spanner_instance_admin.InstanceConfig(), metadata - - client.get_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_instance_config_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.CreateInstanceConfigRequest, - dict, -]) -def test_create_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_instance_config(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operation.Operation) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.CreateInstanceConfigRequest.pb(spanner_instance_admin.CreateInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = spanner_instance_admin.CreateInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata -
- client.create_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_instance_config_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.UpdateInstanceConfigRequest, - dict, -]) -def test_update_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_instance_config(request) - - # Establish that the response is the type that we expect.
- assert isinstance(response, operation.Operation) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.UpdateInstanceConfigRequest.pb(spanner_instance_admin.UpdateInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = spanner_instance_admin.UpdateInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_instance_config_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.DeleteInstanceConfigRequest, - dict, -]) -def test_delete_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_instance_config(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_config") as pre: - pre.assert_not_called() - pb_message = spanner_instance_admin.DeleteInstanceConfigRequest.pb(spanner_instance_admin.DeleteInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner_instance_admin.DeleteInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_list_instance_config_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instance_config_operations(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstanceConfigOperationsRequest, - dict, -]) -def test_list_instance_config_operations_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instance_config_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instance_config_operations_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_config_operations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(spanner_instance_admin.ListInstanceConfigOperationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(spanner_instance_admin.ListInstanceConfigOperationsResponse()) - req.return_value.content = return_value - - request = spanner_instance_admin.ListInstanceConfigOperationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse(), metadata - - client.list_instance_config_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_instances_rest_bad_request(request_type=spanner_instance_admin.ListInstancesRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) -
- # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instances(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancesRequest, - dict, -]) -def test_list_instances_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instances(request) - - # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstancesPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_instances_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instances") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.ListInstancesRequest.pb(spanner_instance_admin.ListInstancesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner_instance_admin.ListInstancesResponse.to_json(spanner_instance_admin.ListInstancesResponse())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.ListInstancesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner_instance_admin.ListInstancesResponse()
-        post_with_metadata.return_value = spanner_instance_admin.ListInstancesResponse(), metadata
-
-        client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_instance_partitions_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionsRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
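-    # A mocked 400 status is enough here: the transport maps the error
-    # response to the matching google.api_core exception, so the call is
-    # expected to raise core_exceptions.BadRequest.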
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_instance_partitions(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.ListInstancePartitionsRequest,
-    dict,
-])
-def test_list_instance_partitions_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner_instance_admin.ListInstancePartitionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_instance_partitions(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstancePartitionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_instance_partitions_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partitions") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.ListInstancePartitionsRequest.pb(spanner_instance_admin.ListInstancePartitionsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner_instance_admin.ListInstancePartitionsResponse.to_json(spanner_instance_admin.ListInstancePartitionsResponse())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.ListInstancePartitionsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner_instance_admin.ListInstancePartitionsResponse()
-        post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionsResponse(), metadata
-
-        client.list_instance_partitions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_instance_rest_bad_request(request_type=spanner_instance_admin.GetInstanceRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_instance(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.GetInstanceRequest,
-    dict,
-])
-def test_get_instance_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner_instance_admin.Instance(
-            name='name_value',
-            config='config_value',
-            display_name='display_name_value',
-            node_count=1070,
-            processing_units=1743,
-            state=spanner_instance_admin.Instance.State.CREATING,
-            instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED,
-            endpoint_uris=['endpoint_uris_value'],
-            edition=spanner_instance_admin.Instance.Edition.STANDARD,
-            default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner_instance_admin.Instance.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_instance(request)
-
-    # Establish that the response is the type that we expect.
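-    # Scalar, repeated, and enum fields should all survive the JSON round
-    # trip above, so each mocked field is checked individually.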
-    assert isinstance(response, spanner_instance_admin.Instance)
-    assert response.name == 'name_value'
-    assert response.config == 'config_value'
-    assert response.display_name == 'display_name_value'
-    assert response.node_count == 1070
-    assert response.processing_units == 1743
-    assert response.state == spanner_instance_admin.Instance.State.CREATING
-    assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED
-    assert response.endpoint_uris == ['endpoint_uris_value']
-    assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD
-    assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_instance_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.GetInstanceRequest.pb(spanner_instance_admin.GetInstanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner_instance_admin.Instance.to_json(spanner_instance_admin.Instance())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.GetInstanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner_instance_admin.Instance()
-        post_with_metadata.return_value = spanner_instance_admin.Instance(), metadata
-
-        client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_create_instance_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
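-    # create_instance is a long-running operation, but the error path below
-    # behaves like a unary call: the 400 surfaces before any
-    # operations_pb2.Operation envelope would be parsed.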
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.create_instance(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.CreateInstanceRequest,
-    dict,
-])
-def test_create_instance_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_instance(request)
-
-    # Establish that the response is the type that we expect: an LRO wrapper
-    # carrying the mocked operation name.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_instance_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.CreateInstanceRequest.pb(spanner_instance_admin.CreateInstanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.CreateInstanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_update_instance_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.update_instance(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.UpdateInstanceRequest,
-    dict,
-])
-def test_update_instance_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.update_instance(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_instance_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.UpdateInstanceRequest.pb(spanner_instance_admin.UpdateInstanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.UpdateInstanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_instance_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_instance(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.DeleteInstanceRequest,
-    dict,
-])
-def test_delete_instance_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
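-    # DeleteInstance returns google.protobuf.Empty over the wire, so the
-    # mocked body is an empty payload and the client should return None.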
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_instance(request)
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_instance_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance") as pre:
-        pre.assert_not_called()
-        pb_message = spanner_instance_admin.DeleteInstanceRequest.pb(spanner_instance_admin.DeleteInstanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = spanner_instance_admin.DeleteInstanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'resource': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.set_iam_policy(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    iam_policy_pb2.SetIamPolicyRequest,
-    dict,
-])
-def test_set_iam_policy_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'resource': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
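-        # The IAM RPCs reuse the shared policy_pb2/iam_policy_pb2 messages
-        # (plain protobuf, not proto-plus), which is why no .pb() conversion
-        # is needed before serializing below.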
-        return_value = policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.set_iam_policy(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_set_iam_policy_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_set_iam_policy") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = iam_policy_pb2.SetIamPolicyRequest()
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(policy_pb2.Policy())
-        req.return_value.content = return_value
-
-        request = iam_policy_pb2.SetIamPolicyRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = policy_pb2.Policy()
-        post_with_metadata.return_value = policy_pb2.Policy(), metadata
-
-        client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'resource': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_iam_policy(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    iam_policy_pb2.GetIamPolicyRequest,
-    dict,
-])
-def test_get_iam_policy_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'resource': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_iam_policy(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_iam_policy_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_iam_policy") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = iam_policy_pb2.GetIamPolicyRequest()
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(policy_pb2.Policy())
-        req.return_value.content = return_value
-
-        request = iam_policy_pb2.GetIamPolicyRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = policy_pb2.Policy()
-        post_with_metadata.return_value = policy_pb2.Policy(), metadata
-
-        client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'resource': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.test_iam_permissions(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    iam_policy_pb2.TestIamPermissionsRequest,
-    dict,
-])
-def test_test_iam_permissions_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'resource': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = iam_policy_pb2.TestIamPermissionsResponse(
-            permissions=['permissions_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.test_iam_permissions(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
-    assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_test_iam_permissions_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_test_iam_permissions") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = iam_policy_pb2.TestIamPermissionsRequest()
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse())
-        req.return_value.content = return_value
-
-        request = iam_policy_pb2.TestIamPermissionsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = iam_policy_pb2.TestIamPermissionsResponse()
-        post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata
-
-        client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_instance_partition_rest_bad_request(request_type=spanner_instance_admin.GetInstancePartitionRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_instance_partition(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.GetInstancePartitionRequest,
-    dict,
-])
-def test_get_instance_partition_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
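-    # node_count and processing_units are alternative compute-capacity
-    # fields on InstancePartition, so the mocked value sets only node_count.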
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner_instance_admin.InstancePartition(
-            name='name_value',
-            config='config_value',
-            display_name='display_name_value',
-            state=spanner_instance_admin.InstancePartition.State.CREATING,
-            referencing_databases=['referencing_databases_value'],
-            referencing_backups=['referencing_backups_value'],
-            etag='etag_value',
-            node_count=1070,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner_instance_admin.InstancePartition.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_instance_partition(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner_instance_admin.InstancePartition)
-    assert response.name == 'name_value'
-    assert response.config == 'config_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == spanner_instance_admin.InstancePartition.State.CREATING
-    assert response.referencing_databases == ['referencing_databases_value']
-    assert response.referencing_backups == ['referencing_backups_value']
-    assert response.etag == 'etag_value'
-    assert response.node_count == 1070
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_instance_partition_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_partition") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.GetInstancePartitionRequest.pb(spanner_instance_admin.GetInstancePartitionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner_instance_admin.InstancePartition.to_json(spanner_instance_admin.InstancePartition())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.GetInstancePartitionRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner_instance_admin.InstancePartition()
-        post_with_metadata.return_value = spanner_instance_admin.InstancePartition(), metadata
-
-        client.get_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_create_instance_partition_rest_bad_request(request_type=spanner_instance_admin.CreateInstancePartitionRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.create_instance_partition(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.CreateInstancePartitionRequest,
-    dict,
-])
-def test_create_instance_partition_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_instance_partition(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_instance_partition_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_partition") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.CreateInstancePartitionRequest.pb(spanner_instance_admin.CreateInstancePartitionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.CreateInstancePartitionRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_instance_partition_rest_bad_request(request_type=spanner_instance_admin.DeleteInstancePartitionRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_instance_partition(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.DeleteInstancePartitionRequest,
-    dict,
-])
-def test_delete_instance_partition_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_instance_partition(request)
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_instance_partition_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_partition") as pre:
-        pre.assert_not_called()
-        pb_message = spanner_instance_admin.DeleteInstancePartitionRequest.pb(spanner_instance_admin.DeleteInstancePartitionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = spanner_instance_admin.DeleteInstancePartitionRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.delete_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_update_instance_partition_rest_bad_request(request_type=spanner_instance_admin.UpdateInstancePartitionRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
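-    # Update requests nest the resource under its field name so URI
-    # transcoding can be derived from instance_partition.name.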
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.update_instance_partition(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.UpdateInstancePartitionRequest,
-    dict,
-])
-def test_update_instance_partition_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.update_instance_partition(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_instance_partition_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_partition") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.UpdateInstancePartitionRequest.pb(spanner_instance_admin.UpdateInstancePartitionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.UpdateInstancePartitionRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_instance_partition_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_instance_partition_operations(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.ListInstancePartitionOperationsRequest,
-    dict,
-])
-def test_list_instance_partition_operations_rest_call_success(request_type):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_instance_partition_operations(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstancePartitionOperationsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_instance_partition_operations_rest_interceptors(null_interceptor):
-    transport = transports.InstanceAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
-    )
-    client = InstanceAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations") as post, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partition_operations") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(spanner_instance_admin.ListInstancePartitionOperationsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(spanner_instance_admin.ListInstancePartitionOperationsResponse())
-        req.return_value.content = return_value
-
-        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
-        post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(), metadata
-
-        client.list_instance_partition_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_move_instance_rest_bad_request(request_type=spanner_instance_admin.MoveInstanceRequest):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.move_instance(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.MoveInstanceRequest, - dict, -]) -def test_move_instance_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.move_instance(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operation.Operation) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_move_instance_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_move_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.MoveInstanceRequest.pb(spanner_instance_admin.MoveInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = spanner_instance_admin.MoveInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.move_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - -
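
For orientation, the interceptor tests in this file all drive the same hook surface: the REST transport accepts an optional InstanceAdminRestInterceptor, and for each RPC it calls pre_<method> before the HTTP request and post_<method> (plus post_<method>_with_metadata) after it, as the mocks above show. A minimal sketch of a custom interceptor, assuming only the hook signatures exercised by these tests; the class name and print statements are illustrative, not part of the library:

from google.cloud.spanner_admin_instance_v1.services.instance_admin import (
    InstanceAdminClient,
    transports,
)


class LoggingInterceptor(transports.InstanceAdminRestInterceptor):
    def pre_move_instance(self, request, metadata):
        # Runs before the HTTP request; must return the (request, metadata) pair.
        print(f"moving instance: {request.name}")
        return request, metadata

    def post_move_instance(self, response):
        # Runs on the raw longrunning Operation proto; must return a response.
        print(f"started operation: {response.name}")
        return response


# Credentials resolve via Application Default Credentials when omitted.
transport = transports.InstanceAdminRestTransport(interceptor=LoggingInterceptor())
client = InstanceAdminClient(transport=transport)
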
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_configs_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - client.list_instance_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - client.get_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
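
These coverage failsafes all share one shape: invoke a client method with request=None and assert that the transport stub received the default (empty) request message. Distilled into a single sketch under the same mocking assumptions as the tests; the helper name is mine, and get_instance stands in for any of the methods covered here:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin


def check_empty_call_sends_default_request():
    client = InstanceAdminClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
        # request=None should be normalized to the default request message.
        client.get_instance(request=None)
        _, args, _ = call.mock_calls[0]
        assert args[0] == spanner_instance_admin.GetInstanceRequest()
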
-def test_create_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - client.create_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - client.update_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - client.delete_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_config_operations_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - client.list_instance_config_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instances_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_instance_partitions_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - client.list_instance_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - client.create_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - client.delete_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_set_iam_policy_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - client.get_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - client.create_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
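
The failsafes just above cover the IAM methods, which take the standard google.iam.v1 request messages rather than Spanner-specific ones. A hedged usage sketch; the resource name and permission string below are made-up examples:

from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
from google.iam.v1 import iam_policy_pb2

client = InstanceAdminClient()  # credentials resolve via ADC

# Hypothetical instance resource name.
resource = "projects/my-project/instances/my-instance"

policy = client.get_iam_policy(
    request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
)
response = client.test_iam_permissions(
    request=iam_policy_pb2.TestIamPermissionsRequest(
        resource=resource,
        permissions=["spanner.instances.get"],
    )
)
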
-def test_delete_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - client.delete_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - client.update_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_partition_operations_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - client.list_instance_partition_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_move_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - client.move_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.MoveInstanceRequest() - - assert args[0] == request_msg - - -def test_instance_admin_rest_lro_client(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.InstanceAdminGrpcTransport, - ) - -def test_instance_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.InstanceAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_instance_admin_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.InstanceAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_instance_configs', - 'get_instance_config', - 'create_instance_config', - 'update_instance_config', - 'delete_instance_config', - 'list_instance_config_operations', - 'list_instances', - 'list_instance_partitions', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_instance_partition', - 'create_instance_partition', - 'delete_instance_partition', - 'update_instance_partition', - 'list_instance_partition_operations', - 'move_instance', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_instance_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id="octopus", - ) - - -def test_instance_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceAdminTransport() - adc.assert_called_once() - - -def test_instance_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstanceAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - ], -) -def test_instance_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - transports.InstanceAdminRestTransport, - ], -) -def test_instance_admin_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.InstanceAdminGrpcTransport, grpc_helpers), - (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_instance_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) -def test_instance_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_instance_admin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.InstanceAdminRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_instance_admin_host_no_port(transport_name): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_instance_admin_host_with_port(transport_name): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 
'spanner.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_instance_admin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = InstanceAdminClient( - credentials=creds1, - transport=transport_name, - ) - client2 = InstanceAdminClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_instance_configs._session - session2 = client2.transport.list_instance_configs._session - assert session1 != session2 - session1 = client1.transport.get_instance_config._session - session2 = client2.transport.get_instance_config._session - assert session1 != session2 - session1 = client1.transport.create_instance_config._session - session2 = client2.transport.create_instance_config._session - assert session1 != session2 - session1 = client1.transport.update_instance_config._session - session2 = client2.transport.update_instance_config._session - assert session1 != session2 - session1 = client1.transport.delete_instance_config._session - session2 = client2.transport.delete_instance_config._session - assert session1 != session2 - session1 = client1.transport.list_instance_config_operations._session - session2 = client2.transport.list_instance_config_operations._session - assert session1 != session2 - session1 = client1.transport.list_instances._session - session2 = client2.transport.list_instances._session - assert session1 != session2 - session1 = client1.transport.list_instance_partitions._session - session2 = client2.transport.list_instance_partitions._session - assert session1 != session2 - session1 = client1.transport.get_instance._session - session2 = client2.transport.get_instance._session - assert session1 != session2 - session1 = client1.transport.create_instance._session - session2 = client2.transport.create_instance._session - assert session1 != session2 - session1 = client1.transport.update_instance._session - session2 = client2.transport.update_instance._session - assert session1 != session2 - session1 = client1.transport.delete_instance._session - session2 = client2.transport.delete_instance._session - assert session1 != session2 - session1 = client1.transport.set_iam_policy._session - session2 = client2.transport.set_iam_policy._session - assert session1 != session2 - session1 = client1.transport.get_iam_policy._session - session2 = client2.transport.get_iam_policy._session - assert session1 != session2 - session1 = client1.transport.test_iam_permissions._session - session2 = client2.transport.test_iam_permissions._session - assert session1 != session2 - session1 = client1.transport.get_instance_partition._session - session2 = client2.transport.get_instance_partition._session - assert session1 != session2 - session1 = client1.transport.create_instance_partition._session - session2 = client2.transport.create_instance_partition._session - assert session1 != session2 - session1 = client1.transport.delete_instance_partition._session - session2 = client2.transport.delete_instance_partition._session - assert session1 != session2 - session1 = client1.transport.update_instance_partition._session - session2 = client2.transport.update_instance_partition._session - assert session1 != session2 - session1 = client1.transport.list_instance_partition_operations._session - session2 = 
client2.transport.list_instance_partition_operations._session - assert session1 != session2 - session1 = client1.transport.move_instance._session - session2 = client2.transport.move_instance._session - assert session1 != session2 -def test_instance_admin_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.InstanceAdminGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -def test_instance_admin_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.InstanceAdminGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) -def test_instance_admin_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - -
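
The mTLS tests above check two configuration paths: pass ready-made grpc ssl_channel_credentials, or pass a client_cert_source_for_mtls callback that returns a (certificate_chain, private_key) byte pair, as the assertion on grpc.ssl_channel_credentials shows. A minimal sketch of the callback path, with placeholder cert material; a real callback would load actual client certificates:

from google.cloud.spanner_admin_instance_v1.services.instance_admin import (
    InstanceAdminClient,
    transports,
)


def my_cert_source():
    # Placeholder bytes standing in for a real certificate chain and key.
    return b"cert bytes", b"key bytes"


# Credentials resolve via Application Default Credentials when omitted.
transport = transports.InstanceAdminGrpcTransport(
    client_cert_source_for_mtls=my_cert_source,
)
client = InstanceAdminClient(transport=transport)
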
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) -def test_instance_admin_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_instance_admin_grpc_lro_client(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_instance_admin_grpc_lro_async_client(): - client = InstanceAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_instance_path(): - project = "squid" - instance = "clam" - expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - actual = InstanceAdminClient.instance_path(project, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = { - "project": "whelk", - "instance": "octopus", - } - path = InstanceAdminClient.instance_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_instance_path(path) - assert expected == actual - -def test_instance_config_path(): - project = "oyster" - instance_config = "nudibranch" - expected = "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) - actual = InstanceAdminClient.instance_config_path(project, instance_config) - assert expected == actual - - -def test_parse_instance_config_path(): - expected = { - "project": "cuttlefish", - "instance_config": "mussel", - } - path = InstanceAdminClient.instance_config_path(**expected) - - # Check that the path construction is reversible.
- actual = InstanceAdminClient.parse_instance_config_path(path) - assert expected == actual - -def test_instance_partition_path(): - project = "winkle" - instance = "nautilus" - instance_partition = "scallop" - expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) - actual = InstanceAdminClient.instance_partition_path(project, instance, instance_partition) - assert expected == actual - - -def test_parse_instance_partition_path(): - expected = { - "project": "abalone", - "instance": "squid", - "instance_partition": "clam", - } - path = InstanceAdminClient.instance_partition_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_instance_partition_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = InstanceAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = InstanceAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = InstanceAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = InstanceAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = InstanceAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = InstanceAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = InstanceAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = InstanceAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = InstanceAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = InstanceAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = InstanceAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = InstanceAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - )
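Every operations-mixin test removed above asserts the same routing-header contract: a request field that is bound into the HTTP/1.1 URI must also be echoed in the x-goog-request-params metadata entry, and the async variants differ only in wrapping the mocked return value in grpc_helpers_async.FakeUnaryUnaryCall and awaiting the call. A condensed sketch of the synchronous pattern, reusing the suite's InstanceAdminClient and anonymous credentials (the test name itself is illustrative):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.longrunning import operations_pb2

from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient


def test_get_operation_routing_header():
    client = InstanceAdminClient(credentials=ga_credentials.AnonymousCredentials())

    # The "name" field is part of the URI, so the client must also forward
    # it as an x-goog-request-params metadata entry.
    request = operations_pb2.GetOperationRequest()
    request.name = "locations"

    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation()
        client.get_operation(request)

    # Exactly one stub call, carrying the original request and the routing header.
    assert len(call.mock_calls) == 1
    _, args, kw = call.mock_calls[0]
    assert args[0] == request
    assert ("x-goog-request-params", "name=locations") in kw["metadata"]

diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json index d10e70605f..e89008727d 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-database", - "version": "3.58.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json index 05a040bd1b..f58e9794e2 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++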
b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner-admin-instance", - "version": "3.58.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json index 1eb4c96ad5..f7f33c3d29 100644 --- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-spanner", - "version": "3.58.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index ad3f0fa58e..ef1c92ffff 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index ad3f0fa58e..ef1c92ffff 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt index ad3f0fa58e..ef1c92ffff 100644 --- a/testing/constraints-3.12.txt +++ b/testing/constraints-3.12.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt index 2010e549cc..2ae5a677e8 100644 --- a/testing/constraints-3.13.txt +++ b/testing/constraints-3.13.txt @@ -7,6 +7,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt b/testing/constraints-3.14.txt similarity index 100% rename from owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt rename to testing/constraints-3.14.txt diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index ad3f0fa58e..ef1c92ffff 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index ad3f0fa58e..ef1c92ffff 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 From 3f9c903d94724481338af982878b979f9761a654 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 28 Oct 2025 23:00:34 +0000 Subject: [PATCH 3/4] feat: Expose AutoscalingConfig in InstancePartition PiperOrigin-RevId: 825184314 Source-Link: https://github.com/googleapis/googleapis/commit/72e7439c8e7e9986cf1865e337fc7c64ca5bda1f Source-Link: https://github.com/googleapis/googleapis-gen/commit/007caa06bac676577f8d845b8793c16eff66ac9a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA3Y2FhMDZiYWM2NzY1NzdmOGQ4NDViODc5M2MxNmVmZjY2YWM5YSJ9
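This regeneration surfaces a new autoscaling_config field on spanner_admin_instance_v1's InstancePartition. A rough usage sketch only: the nested limit and target types below are assumed to mirror the existing Instance autoscaling messages, and every resource name is a placeholder; nothing beyond the field itself is confirmed by this patch.

from google.cloud import spanner_admin_instance_v1

# Hypothetical sketch: autoscale a partition between 1 and 3 nodes,
# targeting 65% high-priority CPU and 95% storage utilization.
# (Nested type and field names are assumptions modeled on the existing
# Instance autoscaling messages; resource names are placeholders.)
partition = spanner_admin_instance_v1.InstancePartition(
    config="projects/my-project/instanceConfigs/my-config",
    display_name="my-partition",
    autoscaling_config=spanner_admin_instance_v1.AutoscalingConfig(
        autoscaling_limits=spanner_admin_instance_v1.AutoscalingConfig.AutoscalingLimits(
            min_nodes=1,
            max_nodes=3,
        ),
        autoscaling_targets=spanner_admin_instance_v1.AutoscalingConfig.AutoscalingTargets(
            high_priority_cpu_utilization_percent=65,
            storage_utilization_percent=95,
        ),
    ),
)

request = spanner_admin_instance_v1.CreateInstancePartitionRequest(
    parent="projects/my-project/instances/my-instance",
    instance_partition_id="my-partition",
    instance_partition=partition,
)

--- owl-bot-staging/spanner/v1/.coveragerc | 13 + owl-bot-staging/spanner/v1/.flake8 | 34 + owl-bot-staging/spanner/v1/LICENSE | 202 + owl-bot-staging/spanner/v1/MANIFEST.in | 20 + owl-bot-staging/spanner/v1/README.rst | 143 + .../spanner/v1/docs/_static/custom.css | 20 + .../spanner/v1/docs/_templates/layout.html | 50 + owl-bot-staging/spanner/v1/docs/conf.py | 385 + owl-bot-staging/spanner/v1/docs/index.rst | 10 + .../spanner/v1/docs/multiprocessing.rst | 7 + .../spanner/v1/docs/spanner_v1/services_.rst | 6 + .../spanner/v1/docs/spanner_v1/spanner.rst | 10 + .../spanner/v1/docs/spanner_v1/types_.rst | 6 + .../v1/google/cloud/spanner/__init__.py | 113 + .../v1/google/cloud/spanner/gapic_version.py | 16 + .../spanner/v1/google/cloud/spanner/py.typed | 2 + .../v1/google/cloud/spanner_v1/__init__.py | 114 + .../cloud/spanner_v1/gapic_metadata.json | 268 + .../google/cloud/spanner_v1/gapic_version.py | 16 + .../v1/google/cloud/spanner_v1/py.typed | 2 + .../cloud/spanner_v1/services/__init__.py | 15 + .../spanner_v1/services/spanner/__init__.py | 22 + .../services/spanner/async_client.py | 2125 ++ .../spanner_v1/services/spanner/client.py | 2486 ++ .../spanner_v1/services/spanner/pagers.py | 166 + .../services/spanner/transports/README.rst | 9 + .../services/spanner/transports/__init__.py | 38 + .../services/spanner/transports/base.py | 505 + .../services/spanner/transports/grpc.py | 899 + .../spanner/transports/grpc_asyncio.py | 1125 + .../services/spanner/transports/rest.py | 2898 ++ .../services/spanner/transports/rest_base.py | 817 + .../google/cloud/spanner_v1/types/__init__.py | 122 + .../cloud/spanner_v1/types/change_stream.py | 683 + .../cloud/spanner_v1/types/commit_response.py | 104 + .../v1/google/cloud/spanner_v1/types/keys.py | 248 + .../google/cloud/spanner_v1/types/mutation.py | 201 + .../cloud/spanner_v1/types/query_plan.py | 219 + .../cloud/spanner_v1/types/result_set.py | 379 + .../google/cloud/spanner_v1/types/spanner.py | 1782 ++ .../cloud/spanner_v1/types/transaction.py | 491 + .../v1/google/cloud/spanner_v1/types/type.py | 288 + owl-bot-staging/spanner/v1/mypy.ini | 3 + owl-bot-staging/spanner/v1/noxfile.py | 601 + .../snippet_metadata_google.spanner.v1.json | 2579 ++ ...ted_spanner_batch_create_sessions_async.py | 53 + ...ated_spanner_batch_create_sessions_sync.py | 53 + ..._v1_generated_spanner_batch_write_async.py | 57 + ...r_v1_generated_spanner_batch_write_sync.py | 57 + ...nerated_spanner_begin_transaction_async.py | 52 + ...enerated_spanner_begin_transaction_sync.py | 52 + ...anner_v1_generated_spanner_commit_async.py | 53 + ...panner_v1_generated_spanner_commit_sync.py | 53 + ..._generated_spanner_create_session_async.py | 52 + ...1_generated_spanner_create_session_sync.py | 52 + ..._generated_spanner_delete_session_async.py | 50 + ...1_generated_spanner_delete_session_sync.py | 50 + ...nerated_spanner_execute_batch_dml_async.py | 57 +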
...enerated_spanner_execute_batch_dml_sync.py | 57 + ..._v1_generated_spanner_execute_sql_async.py | 53 + ...r_v1_generated_spanner_execute_sql_sync.py | 53 + ...ted_spanner_execute_streaming_sql_async.py | 54 + ...ated_spanner_execute_streaming_sql_sync.py | 54 + ..._v1_generated_spanner_get_session_async.py | 52 + ...r_v1_generated_spanner_get_session_sync.py | 52 + ...1_generated_spanner_list_sessions_async.py | 53 + ...v1_generated_spanner_list_sessions_sync.py | 53 + ...generated_spanner_partition_query_async.py | 53 + ..._generated_spanner_partition_query_sync.py | 53 + ..._generated_spanner_partition_read_async.py | 53 + ...1_generated_spanner_partition_read_sync.py | 53 + ...spanner_v1_generated_spanner_read_async.py | 54 + .../spanner_v1_generated_spanner_read_sync.py | 54 + ...ner_v1_generated_spanner_rollback_async.py | 51 + ...nner_v1_generated_spanner_rollback_sync.py | 51 + ..._generated_spanner_streaming_read_async.py | 55 + ...1_generated_spanner_streaming_read_sync.py | 55 + .../v1/scripts/fixup_spanner_v1_keywords.py | 191 + owl-bot-staging/spanner/v1/setup.py | 101 + .../spanner/v1/testing/constraints-3.10.txt | 8 + .../spanner/v1/testing/constraints-3.11.txt | 8 + .../spanner/v1/testing/constraints-3.12.txt | 8 + .../spanner/v1/testing/constraints-3.13.txt | 12 + .../spanner/v1/testing/constraints-3.14.txt | 12 + .../spanner/v1/testing/constraints-3.7.txt | 12 + .../spanner/v1/testing/constraints-3.8.txt | 8 + .../spanner/v1/testing/constraints-3.9.txt | 8 + owl-bot-staging/spanner/v1/tests/__init__.py | 16 + .../spanner/v1/tests/unit/__init__.py | 16 + .../spanner/v1/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/spanner_v1/__init__.py | 16 + .../unit/gapic/spanner_v1/test_spanner.py | 11446 ++++++++ .../spanner_admin_database/v1/.coveragerc | 13 + .../spanner_admin_database/v1/.flake8 | 34 + .../spanner_admin_database/v1/LICENSE | 202 + .../spanner_admin_database/v1/MANIFEST.in | 20 + .../spanner_admin_database/v1/README.rst | 143 + .../v1/docs/_static/custom.css | 20 + .../v1/docs/_templates/layout.html | 50 + .../spanner_admin_database/v1/docs/conf.py | 385 + .../spanner_admin_database/v1/docs/index.rst | 10 + .../v1/docs/multiprocessing.rst | 7 + .../database_admin.rst | 10 + .../spanner_admin_database_v1/services_.rst | 6 + .../docs/spanner_admin_database_v1/types_.rst | 6 + .../cloud/spanner_admin_database/__init__.py | 149 + .../spanner_admin_database/gapic_version.py | 16 + .../cloud/spanner_admin_database/py.typed | 2 + .../spanner_admin_database_v1/__init__.py | 150 + .../gapic_metadata.json | 433 + .../gapic_version.py | 16 + .../cloud/spanner_admin_database_v1/py.typed | 2 + .../services/__init__.py | 15 + .../services/database_admin/__init__.py | 22 + .../services/database_admin/async_client.py | 3968 +++ .../services/database_admin/client.py | 4387 +++ .../services/database_admin/pagers.py | 864 + .../database_admin/transports/README.rst | 9 + .../database_admin/transports/__init__.py | 38 + .../database_admin/transports/base.py | 795 + .../database_admin/transports/grpc.py | 1285 + .../database_admin/transports/grpc_asyncio.py | 1656 ++ .../database_admin/transports/rest.py | 5336 ++++ .../database_admin/transports/rest_base.py | 1378 + .../types/__init__.py | 148 + .../spanner_admin_database_v1/types/backup.py | 1097 + .../types/backup_schedule.py | 369 + .../spanner_admin_database_v1/types/common.py | 179 + .../types/spanner_database_admin.py | 1348 + .../spanner_admin_database/v1/mypy.ini | 3 + .../spanner_admin_database/v1/noxfile.py | 601 
+ ...data_google.spanner.admin.database.v1.json | 4480 ++++ ...d_database_admin_add_split_points_async.py | 52 + ...ed_database_admin_add_split_points_sync.py | 52 + ...erated_database_admin_copy_backup_async.py | 58 + ...nerated_database_admin_copy_backup_sync.py | 58 + ...ated_database_admin_create_backup_async.py | 57 + ...base_admin_create_backup_schedule_async.py | 53 + ...abase_admin_create_backup_schedule_sync.py | 53 + ...rated_database_admin_create_backup_sync.py | 57 + ...ed_database_admin_create_database_async.py | 57 + ...ted_database_admin_create_database_sync.py | 57 + ...ated_database_admin_delete_backup_async.py | 50 + ...base_admin_delete_backup_schedule_async.py | 50 + ...abase_admin_delete_backup_schedule_sync.py | 50 + ...rated_database_admin_delete_backup_sync.py | 50 + ...ated_database_admin_drop_database_async.py | 50 + ...rated_database_admin_drop_database_sync.py | 50 + ...nerated_database_admin_get_backup_async.py | 52 + ...atabase_admin_get_backup_schedule_async.py | 52 + ...database_admin_get_backup_schedule_sync.py | 52 + ...enerated_database_admin_get_backup_sync.py | 52 + ...rated_database_admin_get_database_async.py | 52 + ...d_database_admin_get_database_ddl_async.py | 52 + ...ed_database_admin_get_database_ddl_sync.py | 52 + ...erated_database_admin_get_database_sync.py | 52 + ...ted_database_admin_get_iam_policy_async.py | 53 + ...ated_database_admin_get_iam_policy_sync.py | 53 + ...n_internal_update_graph_operation_async.py | 54 + ...in_internal_update_graph_operation_sync.py | 54 + ...base_admin_list_backup_operations_async.py | 53 + ...abase_admin_list_backup_operations_sync.py | 53 + ...abase_admin_list_backup_schedules_async.py | 53 + ...tabase_admin_list_backup_schedules_sync.py | 53 + ...rated_database_admin_list_backups_async.py | 53 + ...erated_database_admin_list_backups_sync.py | 53 + ...se_admin_list_database_operations_async.py | 53 + ...ase_admin_list_database_operations_sync.py | 53 + ...atabase_admin_list_database_roles_async.py | 53 + ...database_admin_list_database_roles_sync.py | 53 + ...ted_database_admin_list_databases_async.py | 53 + ...ated_database_admin_list_databases_sync.py | 53 + ...d_database_admin_restore_database_async.py | 58 + ...ed_database_admin_restore_database_sync.py | 58 + ...ted_database_admin_set_iam_policy_async.py | 53 + ...ated_database_admin_set_iam_policy_sync.py | 53 + ...tabase_admin_test_iam_permissions_async.py | 54 + ...atabase_admin_test_iam_permissions_sync.py | 54 + ...ated_database_admin_update_backup_async.py | 51 + ...base_admin_update_backup_schedule_async.py | 51 + ...abase_admin_update_backup_schedule_sync.py | 51 + ...rated_database_admin_update_backup_sync.py | 51 + ...ed_database_admin_update_database_async.py | 59 + ...atabase_admin_update_database_ddl_async.py | 57 + ...database_admin_update_database_ddl_sync.py | 57 + ...ted_database_admin_update_database_sync.py | 59 + ...ixup_spanner_admin_database_v1_keywords.py | 202 + .../spanner_admin_database/v1/setup.py | 102 + .../v1/testing/constraints-3.10.txt | 9 + .../v1/testing/constraints-3.11.txt | 9 + .../v1/testing/constraints-3.12.txt | 9 + .../v1/testing/constraints-3.13.txt | 13 + .../v1/testing/constraints-3.14.txt | 13 + .../v1/testing/constraints-3.7.txt | 13 + .../v1/testing/constraints-3.8.txt | 9 + .../v1/testing/constraints-3.9.txt | 9 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../spanner_admin_database_v1/__init__.py | 16 + .../test_database_admin.py | 22226 
++++++++++++++++ .../spanner_admin_instance/v1/.coveragerc | 13 + .../spanner_admin_instance/v1/.flake8 | 34 + .../spanner_admin_instance/v1/LICENSE | 202 + .../spanner_admin_instance/v1/MANIFEST.in | 20 + .../spanner_admin_instance/v1/README.rst | 143 + .../v1/docs/_static/custom.css | 20 + .../v1/docs/_templates/layout.html | 50 + .../spanner_admin_instance/v1/docs/conf.py | 385 + .../spanner_admin_instance/v1/docs/index.rst | 10 + .../v1/docs/multiprocessing.rst | 7 + .../instance_admin.rst | 10 + .../spanner_admin_instance_v1/services_.rst | 6 + .../docs/spanner_admin_instance_v1/types_.rst | 6 + .../cloud/spanner_admin_instance/__init__.py | 109 + .../spanner_admin_instance/gapic_version.py | 16 + .../cloud/spanner_admin_instance/py.typed | 2 + .../spanner_admin_instance_v1/__init__.py | 110 + .../gapic_metadata.json | 343 + .../gapic_version.py | 16 + .../cloud/spanner_admin_instance_v1/py.typed | 2 + .../services/__init__.py | 15 + .../services/instance_admin/__init__.py | 22 + .../services/instance_admin/async_client.py | 3468 +++ .../services/instance_admin/client.py | 3849 +++ .../services/instance_admin/pagers.py | 723 + .../instance_admin/transports/README.rst | 9 + .../instance_admin/transports/__init__.py | 38 + .../instance_admin/transports/base.py | 567 + .../instance_admin/transports/grpc.py | 1359 + .../instance_admin/transports/grpc_asyncio.py | 1560 ++ .../instance_admin/transports/rest.py | 4462 ++++ .../instance_admin/transports/rest_base.py | 1152 + .../types/__init__.py | 104 + .../spanner_admin_instance_v1/types/common.py | 99 + .../types/spanner_instance_admin.py | 2377 ++ .../spanner_admin_instance/v1/mypy.ini | 3 + .../spanner_admin_instance/v1/noxfile.py | 601 + ...data_google.spanner.admin.instance.v1.json | 3450 +++ ...ed_instance_admin_create_instance_async.py | 63 + ...ance_admin_create_instance_config_async.py | 57 + ...tance_admin_create_instance_config_sync.py | 57 + ...e_admin_create_instance_partition_async.py | 64 + ...ce_admin_create_instance_partition_sync.py | 64 + ...ted_instance_admin_create_instance_sync.py | 63 + ...ed_instance_admin_delete_instance_async.py | 50 + ...ance_admin_delete_instance_config_async.py | 50 + ...tance_admin_delete_instance_config_sync.py | 50 + ...e_admin_delete_instance_partition_async.py | 50 + ...ce_admin_delete_instance_partition_sync.py | 50 + ...ted_instance_admin_delete_instance_sync.py | 50 + ...ted_instance_admin_get_iam_policy_async.py | 53 + ...ated_instance_admin_get_iam_policy_sync.py | 53 + ...rated_instance_admin_get_instance_async.py | 52 + ...nstance_admin_get_instance_config_async.py | 52 + ...instance_admin_get_instance_config_sync.py | 52 + ...ance_admin_get_instance_partition_async.py | 52 + ...tance_admin_get_instance_partition_sync.py | 52 + ...erated_instance_admin_get_instance_sync.py | 52 + ...n_list_instance_config_operations_async.py | 53 + ...in_list_instance_config_operations_sync.py | 53 + ...tance_admin_list_instance_configs_async.py | 53 + ...stance_admin_list_instance_configs_sync.py | 53 + ...ist_instance_partition_operations_async.py | 53 + ...list_instance_partition_operations_sync.py | 53 + ...ce_admin_list_instance_partitions_async.py | 53 + ...nce_admin_list_instance_partitions_sync.py | 53 + ...ted_instance_admin_list_instances_async.py | 53 + ...ated_instance_admin_list_instances_sync.py | 53 + ...ated_instance_admin_move_instance_async.py | 57 + ...rated_instance_admin_move_instance_sync.py | 57 + ...ted_instance_admin_set_iam_policy_async.py | 53 + 
...ated_instance_admin_set_iam_policy_sync.py | 53 + ...stance_admin_test_iam_permissions_async.py | 54 + ...nstance_admin_test_iam_permissions_sync.py | 54 + ...ed_instance_admin_update_instance_async.py | 61 + ...ance_admin_update_instance_config_async.py | 55 + ...tance_admin_update_instance_config_sync.py | 55 + ...e_admin_update_instance_partition_async.py | 62 + ...ce_admin_update_instance_partition_sync.py | 62 + ...ted_instance_admin_update_instance_sync.py | 61 + ...ixup_spanner_admin_instance_v1_keywords.py | 196 + .../spanner_admin_instance/v1/setup.py | 102 + .../v1/testing/constraints-3.10.txt | 9 + .../v1/testing/constraints-3.11.txt | 9 + .../v1/testing/constraints-3.12.txt | 9 + .../v1/testing/constraints-3.13.txt | 13 + .../v1/testing/constraints-3.14.txt | 13 + .../v1/testing/constraints-3.7.txt | 13 + .../v1/testing/constraints-3.8.txt | 9 + .../v1/testing/constraints-3.9.txt | 9 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../spanner_admin_instance_v1/__init__.py | 16 + .../test_instance_admin.py | 17636 ++++++++++++ 296 files changed, 134784 insertions(+) create mode 100644 owl-bot-staging/spanner/v1/.coveragerc create mode 100644 owl-bot-staging/spanner/v1/.flake8 create mode 100644 owl-bot-staging/spanner/v1/LICENSE create mode 100644 owl-bot-staging/spanner/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner/v1/README.rst create mode 100644 owl-bot-staging/spanner/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/spanner/v1/docs/_templates/layout.html create mode 100644 owl-bot-staging/spanner/v1/docs/conf.py create mode 100644 owl-bot-staging/spanner/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner/v1/docs/multiprocessing.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst create mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py create mode 100644 
owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py create mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py create mode 100644 owl-bot-staging/spanner/v1/mypy.ini create mode 100644 owl-bot-staging/spanner/v1/noxfile.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py create 
mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py create mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py create mode 100644 owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py create mode 100644 owl-bot-staging/spanner/v1/setup.py create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.14.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/spanner/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py create mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/.coveragerc create mode 100644 owl-bot-staging/spanner_admin_database/v1/.flake8 create mode 100644 owl-bot-staging/spanner_admin_database/v1/LICENSE create mode 100644 owl-bot-staging/spanner_admin_database/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner_admin_database/v1/README.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/conf.py 
create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/mypy.ini create mode 100644 owl-bot-staging/spanner_admin_database/v1/noxfile.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py create mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/setup.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/.coveragerc create mode 100644 owl-bot-staging/spanner_admin_instance/v1/.flake8 create mode 100644 owl-bot-staging/spanner_admin_instance/v1/LICENSE create mode 100644 owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in create mode 100644 owl-bot-staging/spanner_admin_instance/v1/README.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/conf.py create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/docs/index.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/mypy.ini create mode 100644 owl-bot-staging/spanner_admin_instance/v1/noxfile.py create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/setup.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt create mode 100644 
owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py create mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py diff --git a/owl-bot-staging/spanner/v1/.coveragerc b/owl-bot-staging/spanner/v1/.coveragerc new file mode 100644 index 0000000000..677a992799 --- /dev/null +++ b/owl-bot-staging/spanner/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/spanner/__init__.py + google/cloud/spanner/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/spanner/v1/.flake8 b/owl-bot-staging/spanner/v1/.flake8 new file mode 100644 index 0000000000..90316de214 --- /dev/null +++ b/owl-bot-staging/spanner/v1/.flake8 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 +exclude = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint + **/gapic/** + **/services/** + **/types/** + # Exclude Protobuf gencode + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/spanner/v1/LICENSE b/owl-bot-staging/spanner/v1/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/owl-bot-staging/spanner/v1/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/owl-bot-staging/spanner/v1/MANIFEST.in b/owl-bot-staging/spanner/v1/MANIFEST.in new file mode 100644 index 0000000000..dae249ec89 --- /dev/null +++ b/owl-bot-staging/spanner/v1/MANIFEST.in @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/owl-bot-staging/spanner/v1/README.rst b/owl-bot-staging/spanner/v1/README.rst new file mode 100644 index 0000000000..502ebce1c4 --- /dev/null +++ b/owl-bot-staging/spanner/v1/README.rst @@ -0,0 +1,143 @@ +Python Client for Google Cloud Spanner API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Spanner API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+ + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured or the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code-based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.)
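To illustrate the first point under "Logging details" above — a minimal sketch, assuming only the standard-library :code:`logging` module, that attaches a handler to the package-level :code:`google` logger and explicitly opts in to propagation to the root logger (which this library leaves off by default):

.. code-block:: python

    import logging

    # Handle DEBUG-and-above events from all Google client libraries.
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # Propagation to the root logger is off by default for this library;
    # opt in explicitly if root-level handlers should also see these events.
    base_logger.propagate = True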
diff --git a/owl-bot-staging/spanner/v1/docs/_static/custom.css b/owl-bot-staging/spanner/v1/docs/_static/custom.css new file mode 100644 index 0000000000..b0a295464b --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/owl-bot-staging/spanner/v1/docs/_templates/layout.html b/owl-bot-staging/spanner/v1/docs/_templates/layout.html new file mode 100644 index 0000000000..95e9c77fcf --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
<div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+            Library versions released prior to that date will continue to be available. For more information please
+            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/spanner/v1/docs/conf.py b/owl-bot-staging/spanner/v1/docs/conf.py new file mode 100644 index 0000000000..010a6b6cda --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-spanner", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-spanner.tex", + u"google-cloud-spanner Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links.
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-spanner", + "google-cloud-spanner Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-spanner", + "google-cloud-spanner Documentation", + author, + "google-cloud-spanner", + "google-cloud-spanner Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner/v1/docs/index.rst b/owl-bot-staging/spanner/v1/docs/index.rst new file mode 100644 index 0000000000..1162c85acc --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/index.rst @@ -0,0 +1,10 @@ +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + spanner_v1/services_ + spanner_v1/types_ diff --git a/owl-bot-staging/spanner/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner/v1/docs/multiprocessing.rst new file mode 100644 index 0000000000..536d17b2ea --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses the :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`.
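A minimal sketch of the pattern the note above recommends — the worker initializer, the module-level client, and the database path here are hypothetical illustrations, not part of the generated sources:

.. code-block:: python

    import multiprocessing

    from google.cloud import spanner_v1

    _client = None  # one client per worker process

    def _init_worker():
        # Construct the client *after* the fork, inside each worker process,
        # so that no gRPC channel state is shared across processes.
        global _client
        _client = spanner_v1.SpannerClient()

    def _count_sessions(database):
        # `database` is a hypothetical fully qualified database name, e.g.
        # "projects/<project>/instances/<instance>/databases/<database>".
        return sum(1 for _ in _client.list_sessions(database=database))

    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2, initializer=_init_worker) as pool:
            print(pool.map(_count_sessions, ["projects/p/instances/i/databases/d"] * 2))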
diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst new file mode 100644 index 0000000000..3bbbb55f79 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner v1 API +======================================== +.. toctree:: + :maxdepth: 2 + + spanner diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst new file mode 100644 index 0000000000..b51f4447e4 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst @@ -0,0 +1,10 @@ +Spanner +------------------------- + +.. automodule:: google.cloud.spanner_v1.services.spanner + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_v1.services.spanner.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst new file mode 100644 index 0000000000..c7ff7e6c71 --- /dev/null +++ b/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Spanner v1 API +===================================== + +.. automodule:: google.cloud.spanner_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py new file mode 100644 index 0000000000..4757e26d69 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.spanner_v1.services.spanner.client import SpannerClient +from google.cloud.spanner_v1.services.spanner.async_client import SpannerAsyncClient + +from google.cloud.spanner_v1.types.change_stream import ChangeStreamRecord +from google.cloud.spanner_v1.types.commit_response import CommitResponse +from google.cloud.spanner_v1.types.keys import KeyRange +from google.cloud.spanner_v1.types.keys import KeySet +from google.cloud.spanner_v1.types.mutation import Mutation +from google.cloud.spanner_v1.types.query_plan import PlanNode +from google.cloud.spanner_v1.types.query_plan import QueryPlan +from google.cloud.spanner_v1.types.result_set import PartialResultSet +from google.cloud.spanner_v1.types.result_set import ResultSet +from google.cloud.spanner_v1.types.result_set import ResultSetMetadata +from google.cloud.spanner_v1.types.result_set import ResultSetStats +from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsRequest +from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsResponse +from google.cloud.spanner_v1.types.spanner import BatchWriteRequest +from google.cloud.spanner_v1.types.spanner import BatchWriteResponse +from google.cloud.spanner_v1.types.spanner import BeginTransactionRequest +from google.cloud.spanner_v1.types.spanner import CommitRequest +from google.cloud.spanner_v1.types.spanner import CreateSessionRequest +from google.cloud.spanner_v1.types.spanner import DeleteSessionRequest +from google.cloud.spanner_v1.types.spanner import DirectedReadOptions +from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlRequest +from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlResponse +from google.cloud.spanner_v1.types.spanner import ExecuteSqlRequest +from google.cloud.spanner_v1.types.spanner import GetSessionRequest +from google.cloud.spanner_v1.types.spanner import ListSessionsRequest +from google.cloud.spanner_v1.types.spanner import ListSessionsResponse +from google.cloud.spanner_v1.types.spanner import Partition +from google.cloud.spanner_v1.types.spanner import PartitionOptions +from google.cloud.spanner_v1.types.spanner import PartitionQueryRequest +from google.cloud.spanner_v1.types.spanner import PartitionReadRequest +from google.cloud.spanner_v1.types.spanner import PartitionResponse +from google.cloud.spanner_v1.types.spanner import ReadRequest +from google.cloud.spanner_v1.types.spanner import RequestOptions +from google.cloud.spanner_v1.types.spanner import RollbackRequest +from google.cloud.spanner_v1.types.spanner import Session +from google.cloud.spanner_v1.types.transaction import MultiplexedSessionPrecommitToken +from google.cloud.spanner_v1.types.transaction import Transaction +from google.cloud.spanner_v1.types.transaction import TransactionOptions +from google.cloud.spanner_v1.types.transaction import TransactionSelector +from google.cloud.spanner_v1.types.type import StructType +from google.cloud.spanner_v1.types.type import Type +from google.cloud.spanner_v1.types.type import TypeAnnotationCode +from google.cloud.spanner_v1.types.type import TypeCode + +__all__ = ('SpannerClient', + 'SpannerAsyncClient', + 'ChangeStreamRecord', + 'CommitResponse', + 'KeyRange', + 'KeySet', + 'Mutation', + 'PlanNode', + 'QueryPlan', + 'PartialResultSet', + 'ResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 
'BatchWriteRequest', + 'BatchWriteResponse', + 'BeginTransactionRequest', + 'CommitRequest', + 'CreateSessionRequest', + 'DeleteSessionRequest', + 'DirectedReadOptions', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'ExecuteSqlRequest', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'Partition', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'PartitionResponse', + 'ReadRequest', + 'RequestOptions', + 'RollbackRequest', + 'Session', + 'MultiplexedSessionPrecommitToken', + 'Transaction', + 'TransactionOptions', + 'TransactionSelector', + 'StructType', + 'Type', + 'TypeAnnotationCode', + 'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed new file mode 100644 index 0000000000..0989eccd04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py new file mode 100644 index 0000000000..6275c8acfb --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.spanner import SpannerClient +from .services.spanner import SpannerAsyncClient + +from .types.change_stream import ChangeStreamRecord +from .types.commit_response import CommitResponse +from .types.keys import KeyRange +from .types.keys import KeySet +from .types.mutation import Mutation +from .types.query_plan import PlanNode +from .types.query_plan import QueryPlan +from .types.result_set import PartialResultSet +from .types.result_set import ResultSet +from .types.result_set import ResultSetMetadata +from .types.result_set import ResultSetStats +from .types.spanner import BatchCreateSessionsRequest +from .types.spanner import BatchCreateSessionsResponse +from .types.spanner import BatchWriteRequest +from .types.spanner import BatchWriteResponse +from .types.spanner import BeginTransactionRequest +from .types.spanner import CommitRequest +from .types.spanner import CreateSessionRequest +from .types.spanner import DeleteSessionRequest +from .types.spanner import DirectedReadOptions +from .types.spanner import ExecuteBatchDmlRequest +from .types.spanner import ExecuteBatchDmlResponse +from .types.spanner import ExecuteSqlRequest +from .types.spanner import GetSessionRequest +from .types.spanner import ListSessionsRequest +from .types.spanner import ListSessionsResponse +from .types.spanner import Partition +from .types.spanner import PartitionOptions +from .types.spanner import PartitionQueryRequest +from .types.spanner import PartitionReadRequest +from .types.spanner import PartitionResponse +from .types.spanner import ReadRequest +from .types.spanner import RequestOptions +from .types.spanner import RollbackRequest +from .types.spanner import Session +from .types.transaction import MultiplexedSessionPrecommitToken +from .types.transaction import Transaction +from .types.transaction import TransactionOptions +from .types.transaction import TransactionSelector +from .types.type import StructType +from .types.type import Type +from .types.type import TypeAnnotationCode +from .types.type import TypeCode + +__all__ = ( + 'SpannerAsyncClient', +'BatchCreateSessionsRequest', +'BatchCreateSessionsResponse', +'BatchWriteRequest', +'BatchWriteResponse', +'BeginTransactionRequest', +'ChangeStreamRecord', +'CommitRequest', +'CommitResponse', +'CreateSessionRequest', +'DeleteSessionRequest', +'DirectedReadOptions', +'ExecuteBatchDmlRequest', +'ExecuteBatchDmlResponse', +'ExecuteSqlRequest', +'GetSessionRequest', +'KeyRange', +'KeySet', +'ListSessionsRequest', +'ListSessionsResponse', +'MultiplexedSessionPrecommitToken', +'Mutation', +'PartialResultSet', +'Partition', +'PartitionOptions', +'PartitionQueryRequest', +'PartitionReadRequest', +'PartitionResponse', +'PlanNode', +'QueryPlan', +'ReadRequest', +'RequestOptions', +'ResultSet', +'ResultSetMetadata', +'ResultSetStats', +'RollbackRequest', +'Session', +'SpannerClient', +'StructType', +'Transaction', +'TransactionOptions', +'TransactionSelector', +'Type', +'TypeAnnotationCode', +'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json new file mode 100644 index 0000000000..f5957c633a --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json @@ -0,0 +1,268 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + 
"language": "python", + "libraryPackage": "google.cloud.spanner_v1", + "protoPackage": "google.spanner.v1", + "schema": "1.0", + "services": { + "Spanner": { + "clients": { + "grpc": { + "libraryClient": "SpannerClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SpannerAsyncClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + }, + "rest": { + "libraryClient": "SpannerClient", + "rpcs": { + "BatchCreateSessions": { + "methods": [ + "batch_create_sessions" + ] + }, + "BatchWrite": { + "methods": [ + "batch_write" + ] + }, + "BeginTransaction": { + "methods": [ + "begin_transaction" + ] + }, + "Commit": { + "methods": [ + "commit" + ] + }, + "CreateSession": { + "methods": [ + "create_session" + ] + }, + "DeleteSession": { + "methods": [ + "delete_session" + ] + }, + "ExecuteBatchDml": { + "methods": [ + "execute_batch_dml" + ] + }, + "ExecuteSql": { + "methods": [ + "execute_sql" + ] + }, + "ExecuteStreamingSql": { + "methods": [ + "execute_streaming_sql" + ] + }, + "GetSession": { + "methods": [ + "get_session" + ] + }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, + "PartitionQuery": { + "methods": [ + "partition_query" + ] + }, + "PartitionRead": { + "methods": [ + "partition_read" + ] + }, + "Read": { + "methods": [ + "read" + ] + }, + "Rollback": { + "methods": [ + "rollback" + ] + }, + "StreamingRead": { + "methods": [ + "streaming_read" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py new 
file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed new file mode 100644 index 0000000000..0989eccd04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py new file mode 100644 index 0000000000..cbf94b283c --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py new file mode 100644 index 0000000000..a7de7a7b93 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SpannerClient +from .async_client import SpannerAsyncClient + +__all__ = ( + 'SpannerClient', + 'SpannerAsyncClient', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py new file mode 100644 index 0000000000..e3fe8cabac --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py @@ -0,0 +1,2125 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union + +from google.cloud.spanner_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport +from .client import SpannerClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class SpannerAsyncClient: + """Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + """ + + _client: SpannerClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
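+    # A sketch of how these defaults are expected to resolve (the actual
+    # values live on the synchronous SpannerClient): _DEFAULT_ENDPOINT_TEMPLATE
+    # is a format string such as "spanner.{UNIVERSE_DOMAIN}", so with the
+    # default "googleapis.com" universe the endpoint would become
+    # "spanner.googleapis.com". Treat the template value as an assumption
+    # here; the authoritative definition is in client.py.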
+    DEFAULT_ENDPOINT = SpannerClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = SpannerClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = SpannerClient._DEFAULT_UNIVERSE
+
+    database_path = staticmethod(SpannerClient.database_path)
+    parse_database_path = staticmethod(SpannerClient.parse_database_path)
+    session_path = staticmethod(SpannerClient.session_path)
+    parse_session_path = staticmethod(SpannerClient.parse_session_path)
+    common_billing_account_path = staticmethod(SpannerClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(SpannerClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(SpannerClient.common_folder_path)
+    parse_common_folder_path = staticmethod(SpannerClient.parse_common_folder_path)
+    common_organization_path = staticmethod(SpannerClient.common_organization_path)
+    parse_common_organization_path = staticmethod(SpannerClient.parse_common_organization_path)
+    common_project_path = staticmethod(SpannerClient.common_project_path)
+    parse_common_project_path = staticmethod(SpannerClient.parse_common_project_path)
+    common_location_path = staticmethod(SpannerClient.common_location_path)
+    parse_common_location_path = staticmethod(SpannerClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SpannerAsyncClient: The constructed client.
+        """
+        return SpannerClient.from_service_account_info.__func__(SpannerAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SpannerAsyncClient: The constructed client.
+        """
+        return SpannerClient.from_service_account_file.__func__(SpannerAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return SpannerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> SpannerTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SpannerTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+            by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = SpannerClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the spanner async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,SpannerTransport,Callable[..., SpannerTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SpannerTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence, and ``universe_domain`` is
+                currently not supported for mTLS.
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SpannerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner_v1.SpannerAsyncClient`.", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.spanner.v1.Spanner", + "credentialsType": None, + } + ) + + async def create_session(self, + request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_create_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.CreateSessionRequest( + database="database_value", + ) + + # Make the request + response = await client.create_session(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]]): + The request object. The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + database (:class:`str`): + Required. 
The database in which the + new session is created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CreateSessionRequest): + request = spanner.CreateSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_create_sessions(self, + request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + session_count: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.BatchCreateSessionsResponse: + r"""Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_batch_create_sessions(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.BatchCreateSessionsRequest( + database="database_value", + session_count=1420, + ) + + # Make the request + response = await client.batch_create_sessions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]]): + The request object. The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + database (:class:`str`): + Required. The database in which the + new sessions are created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session_count (:class:`int`): + Required. The number of sessions to be created in this + batch call. The API can return fewer than the requested + number of sessions. If a specific number of sessions are + desired, the client can make additional calls to + ``BatchCreateSessions`` (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + + This corresponds to the ``session_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.BatchCreateSessionsResponse: + The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, session_count] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchCreateSessionsRequest): + request = spanner.BatchCreateSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if session_count is not None: + request.session_count = session_count + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_create_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. 
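+        # A sketch of what the routing header below is expected to expand to.
+        # For an illustrative database path of
+        # "projects/p/instances/i/databases/d" (a hypothetical value),
+        # `to_grpc_metadata` should yield roughly:
+        #
+        #   (("x-goog-request-params",
+        #     "database=projects%2Fp%2Finstances%2Fi%2Fdatabases%2Fd"),)
+        #
+        # i.e. a single "x-goog-request-params" entry with percent-encoded
+        # values, which lets the Spanner frontend route the call without
+        # parsing the request body.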
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_session(self, + request: Optional[Union[spanner.GetSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_get_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]]): + The request object. The request for + [GetSession][google.spanner.v1.Spanner.GetSession]. + name (:class:`str`): + Required. The name of the session to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.GetSessionRequest): + request = spanner.GetSessionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
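+        # A sketch of what the "wrapped" method adds. The transport layer
+        # (see transports/base.py and the asyncio transport) wraps each bound
+        # RPC with defaults from the service config; the retry values shown
+        # here are assumptions for illustration, not the authoritative config:
+        #
+        #   gapic_v1.method_async.wrap_method(
+        #       transport.get_session,
+        #       default_retry=retries.AsyncRetry(
+        #           initial=0.25, maximum=32.0, multiplier=1.3,
+        #           predicate=retries.if_exception_type(
+        #               core_exceptions.ServiceUnavailable)),
+        #       default_timeout=30.0,
+        #   )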
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_sessions(self, + request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSessionsAsyncPager: + r"""Lists all sessions in a given database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_list_sessions(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ListSessionsRequest( + database="database_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]]): + The request object. The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + database (:class:`str`): + Required. The database in which to + list sessions. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager: + The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
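+        # Proto-plus request classes accept either another message or a dict
+        # with the same field names, so the coercion below means these two
+        # calls are expected to be equivalent (the path is illustrative):
+        #
+        #   spanner.ListSessionsRequest(database="projects/p/instances/i/databases/d")
+        #   spanner.ListSessionsRequest({"database": "projects/p/instances/i/databases/d"})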
+ if not isinstance(request, spanner.ListSessionsRequest): + request = spanner.ListSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSessionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_session(self, + request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Ends a session, releasing server resources associated + with it. This asynchronously triggers the cancellation + of any operations that are running with this session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_delete_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + await client.delete_session(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]]): + The request object. The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + name (:class:`str`): + Required. The name of the session to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
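+        # For example, a call like the following is expected to raise the
+        # ValueError below, because `request` and the flattened `name`
+        # argument are mutually exclusive (the path is illustrative):
+        #
+        #   await client.delete_session(
+        #       request=spanner_v1.DeleteSessionRequest(name="projects/p/..."),
+        #       name="projects/p/...")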
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner.DeleteSessionRequest):
+            request = spanner.DeleteSessionRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_session]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def execute_sql(self,
+            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> result_set.ResultSet:
+        r"""Executes an SQL statement, returning all results in a single
+        reply. This method can't be used to return a result set larger
+        than 10 MiB; if the query yields more data than that, the query
+        fails with a ``FAILED_PRECONDITION`` error.
+
+        Operations inside read-write transactions might return
+        ``ABORTED``. If this occurs, the application should restart the
+        transaction from the beginning. See
+        [Transaction][google.spanner.v1.Transaction] for more details.
+
+        Larger result sets can be fetched in streaming fashion by
+        calling
+        [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]
+        instead.
+
+        The query string can be SQL or Graph Query Language (GQL).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_v1
+
+            async def sample_execute_sql():
+                # Create a client
+                client = spanner_v1.SpannerAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_v1.ExecuteSqlRequest(
+                    session="session_value",
+                    sql="sql_value",
+                )
+
+                # Make the request
+                response = await client.execute_sql(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]):
+                The request object. The request for
+                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
+                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_v1.types.ResultSet:
+                Results from [Read][google.spanner.v1.Spanner.Read] or
+                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner.ExecuteSqlRequest):
+            request = spanner.ExecuteSqlRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.execute_sql]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("session", request.session),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def execute_streaming_sql(self,
+            request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]:
+        r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except
+        returns the result set as a stream. Unlike
+        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no
+        limit on the size of the returned result set. However, no
+        individual row in the result set can exceed 100 MiB, and no
+        column value can exceed 10 MiB.
+
+        The query string can be SQL or Graph Query Language (GQL).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_v1
+
+            async def sample_execute_streaming_sql():
+                # Create a client
+                client = spanner_v1.SpannerAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_v1.ExecuteSqlRequest(
+                    session="session_value",
+                    sql="sql_value",
+                )
+
+                # Make the request
+                stream = await client.execute_streaming_sql(request=request)
+
+                # Handle the response
+                async for response in stream:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]):
+                The request object. The request for
+                [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
+                [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata.
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ExecuteSqlRequest): + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.execute_streaming_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def execute_batch_dml(self, + request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_execute_batch_dml(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + statements = spanner_v1.Statement() + statements.sql = "sql_value" + + request = spanner_v1.ExecuteBatchDmlRequest( + session="session_value", + statements=statements, + seqno=550, + ) + + # Make the request + response = await client.execute_batch_dml(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]]): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_v1.types.ExecuteBatchDmlResponse:
+                The response for
+                [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml].
+                Contains a list of
+                [ResultSet][google.spanner.v1.ResultSet] messages,
+                one for each DML statement that has successfully
+                executed, in the same order as the statements in the
+                request. If a statement fails, the status in the
+                response body identifies the cause of the failure.
+
+                To check for DML statements that failed, use the
+                following approach:
+
+                1. Check the status in the response message. The
+                   [google.rpc.Code][google.rpc.Code] enum value OK
+                   indicates that all statements were executed
+                   successfully.
+                2. If the status was not OK, check the number of result
+                   sets in the response. If the response contains N
+                   [ResultSet][google.spanner.v1.ResultSet] messages,
+                   then statement N+1 in the request failed.
+
+                Example 1:
+
+                - Request: 5 DML statements, all executed successfully.
+                - Response: 5 [ResultSet][google.spanner.v1.ResultSet]
+                  messages, with the status OK.
+
+                Example 2:
+
+                - Request: 5 DML statements. The third statement has a
+                  syntax error.
+                - Response: 2 [ResultSet][google.spanner.v1.ResultSet]
+                  messages, and a syntax error (INVALID_ARGUMENT)
+                  status. The number of
+                  [ResultSet][google.spanner.v1.ResultSet] messages
+                  indicates that the third statement failed, and the
+                  fourth and fifth statements were not executed.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner.ExecuteBatchDmlRequest):
+            request = spanner.ExecuteBatchDmlRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport.execute_batch_dml]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("session", request.session),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def read(self,
+            request: Optional[Union[spanner.ReadRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> result_set.ResultSet:
+        r"""Reads rows from the database using key lookups and scans, as a
+        simple key/value style alternative to
+        [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method
+        can't be used to return a result set larger than 10 MiB; if the
+        read matches more data than that, the read fails with a
+        ``FAILED_PRECONDITION`` error.
+
+        Reads inside read-write transactions might return ``ABORTED``.
+ If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + response = await client.read(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_read(self, + request: Optional[Union[spanner.ReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: + r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = await client.streaming_read(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.streaming_read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def begin_transaction(self, + request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, + *, + session: Optional[str] = None, + options: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> transaction.Transaction: + r"""Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_begin_transaction(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.BeginTransactionRequest( + session="session_value", + ) + + # Make the request + response = await client.begin_transaction(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]]): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + session (:class:`str`): + Required. The session in which the + transaction runs. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + options (:class:`google.cloud.spanner_v1.types.TransactionOptions`): + Required. Options for the new + transaction. + + This corresponds to the ``options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Transaction: + A transaction. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, options] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BeginTransactionRequest): + request = spanner.BeginTransactionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if options is not None: + request.options = options + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.begin_transaction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def commit(self, + request: Optional[Union[spanner.CommitRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + mutations: Optional[MutableSequence[mutation.Mutation]] = None, + single_use_transaction: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> commit_response.CommitResponse: + r"""Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_commit(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.CommitRequest( + transaction_id=b'transaction_id_blob', + session="session_value", + ) + + # Make the request + response = await client.commit(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.CommitRequest, dict]]): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + session (:class:`str`): + Required. The session in which the + transaction to be committed is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (:class:`bytes`): + Commit a previously-started + transaction. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (:class:`MutableSequence[google.cloud.spanner_v1.types.Mutation]`): + The mutations to be executed when + this transaction commits. All mutations + are applied atomically, in the order + they appear in this list. + + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + single_use_transaction (:class:`google.cloud.spanner_v1.types.TransactionOptions`): + Execute mutations in a temporary transaction. Note that + unlike commit of a previously-started transaction, + commit with a temporary transaction is non-idempotent. 
+ That is, if the ``CommitRequest`` is sent to Cloud + Spanner more than once (for instance, due to retries in + the application, or in the transport library), it's + possible that the mutations are executed more than once. + If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + + This corresponds to the ``single_use_transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id, mutations, single_use_transaction] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CommitRequest): + request = spanner.CommitRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + if single_use_transaction is not None: + request.single_use_transaction = single_use_transaction + if mutations: + request.mutations.extend(mutations) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.commit] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rollback(self, + request: Optional[Union[spanner.RollbackRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. 
+ + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_rollback(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + await client.rollback(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.RollbackRequest, dict]]): + The request object. The request for + [Rollback][google.spanner.v1.Spanner.Rollback]. + session (:class:`str`): + Required. The session in which the + transaction to roll back is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (:class:`bytes`): + Required. The transaction to roll + back. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.RollbackRequest): + request = spanner.RollbackRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
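+        # Editor's note: Rollback's wire response is google.protobuf.Empty,
+        # so the RPC below is awaited purely for its side effect and this
+        # method returns None.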
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def partition_query(self, + request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + query operation in parallel. Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + ``PartitionQueryRequest`` used to create the partition tokens + and the ``ExecuteSqlRequests`` that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the query, and the whole operation must be restarted + from the beginning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_partition_query(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionQueryRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = await client.partition_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]]): + The request object. The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.PartitionQueryRequest): + request = spanner.PartitionQueryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.partition_query] + + # Certain fields should be provided within the metadata header; + # add these here. 
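+        # Editor's note: to_grpc_metadata folds these key/value pairs into a
+        # single URL-encoded "x-goog-request-params" metadata entry (e.g.
+        # "session=projects%2F...%2Fsessions%2F..."), which Cloud Spanner
+        # uses to route the call to the right backend.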
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def partition_read(self, + request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = await client.partition_read(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]]): + The request object. The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
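+        # Editor's note: proto-plus message constructors accept either a dict
+        # or a message of the same type and copy the fields, so a plain dict
+        # such as {"session": "...", "table": "..."} is coerced into a
+        # PartitionReadRequest here.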
+ if not isinstance(request, spanner.PartitionReadRequest): + request = spanner.PartitionReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.partition_read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_write(self, + request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, + *, + session: Optional[str] = None, + mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[spanner.BatchWriteResponse]]: + r"""Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + async def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = await client.batch_write(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]]): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + session (:class:`str`): + Required. The session in which the + batch request is to be run. 
+ + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutation_groups (:class:`MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]`): + Required. The groups of mutations to + be applied. + + This corresponds to the ``mutation_groups`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.spanner_v1.types.BatchWriteResponse]: + The result of applying a batch of + mutations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, mutation_groups] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchWriteRequest): + request = spanner.BatchWriteRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if mutation_groups: + request.mutation_groups.extend(mutation_groups) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_write] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
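+        # Editor's note: unlike the unary methods above, this streaming call
+        # is intentionally not awaited here. The wrapped RPC returns an
+        # awaitable that resolves to an AsyncIterable[BatchWriteResponse], so
+        # callers `await` the method and then iterate with `async for`, as in
+        # the docstring snippet.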
+ return response + + async def __aenter__(self) -> "SpannerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "SpannerAsyncClient", +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py new file mode 100644 index 0000000000..bd4ac8451f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py @@ -0,0 +1,2486 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.spanner_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SpannerGrpcTransport +from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport +from .transports.rest import SpannerRestTransport + + +class SpannerClientMeta(type): + 
"""Metaclass for the Spanner client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] + _transport_registry["grpc"] = SpannerGrpcTransport + _transport_registry["grpc_asyncio"] = SpannerGrpcAsyncIOTransport + _transport_registry["rest"] = SpannerRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[SpannerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SpannerClient(metaclass=SpannerClientMeta): + """Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "spanner.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SpannerClient: The constructed client. 
+    @staticmethod
+    def common_billing_account_path(billing_account: str) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account)
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location)
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
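+    # Editor's note: a rough summary of the endpoint selection implemented by
+    # get_mtls_endpoint_and_cert_source below (these are the only values the
+    # method accepts):
+    #
+    #   client_options.api_endpoint set              -> that endpoint wins
+    #   GOOGLE_API_USE_MTLS_ENDPOINT=always          -> DEFAULT_MTLS_ENDPOINT
+    #   GOOGLE_API_USE_MTLS_ENDPOINT=never           -> DEFAULT_ENDPOINT
+    #   GOOGLE_API_USE_MTLS_ENDPOINT=auto, cert set  -> DEFAULT_MTLS_ENDPOINT
+    #   GOOGLE_API_USE_MTLS_ENDPOINT=auto, no cert   -> DEFAULT_ENDPOINT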
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = SpannerClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = SpannerClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SpannerClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. 
+
+        Returns:
+            bool: True iff the configured universe domain is valid.
+
+        Raises:
+            ValueError: If the configured universe domain is not valid.
+        """
+
+        # NOTE (b/349488459): universe validation is disabled until further notice.
+        return True
+
+    def _add_cred_info_for_auth_errors(
+        self,
+        error: core_exceptions.GoogleAPICallError
+    ) -> None:
+        """Adds credential info string to error details for 401/403/404 errors.
+
+        Args:
+            error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info to.
+        """
+        if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]:
+            return
+
+        cred = self._transport._credentials
+
+        # get_cred_info is only available in google-auth>=2.35.0
+        if not hasattr(cred, "get_cred_info"):
+            return
+
+        # ignore the type check since pypy test fails when get_cred_info
+        # is not available
+        cred_info = cred.get_cred_info()  # type: ignore
+        if cred_info and hasattr(error._details, "append"):
+            error._details.append(json.dumps(cred_info))
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used by the client instance.
+        """
+        return self._universe_domain
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the spanner client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,SpannerTransport,Callable[..., SpannerTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SpannerTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+ + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = SpannerClient._read_environment_variables() + self._client_cert_source = SpannerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = SpannerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SpannerTransport) + if transport_provided: + # transport is a SpannerTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(SpannerTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + SpannerClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[SpannerTransport], Callable[..., SpannerTransport]] = ( + SpannerClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SpannerTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner_v1.SpannerClient`.", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.spanner.v1.Spanner", + "credentialsType": None, + } + ) + + def create_session(self, + request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_create_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.CreateSessionRequest( + database="database_value", + ) + + # Make the request + response = client.create_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): + The request object. The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + database (str): + Required. The database in which the + new session is created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CreateSessionRequest): + request = spanner.CreateSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_create_sessions(self, + request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + session_count: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.BatchCreateSessionsResponse: + r"""Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_batch_create_sessions(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.BatchCreateSessionsRequest( + database="database_value", + session_count=1420, + ) + + # Make the request + response = client.batch_create_sessions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): + The request object. The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + database (str): + Required. The database in which the + new sessions are created. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + session_count (int): + Required. The number of sessions to be created in this + batch call. The API can return fewer than the requested + number of sessions. If a specific number of sessions are + desired, the client can make additional calls to + ``BatchCreateSessions`` (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + + This corresponds to the ``session_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.BatchCreateSessionsResponse: + The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, session_count] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchCreateSessionsRequest): + request = spanner.BatchCreateSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if session_count is not None: + request.session_count = session_count + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
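+        # Editor's note: _wrapped_methods maps each bare transport stub to a
+        # version wrapped with the default retry/timeout policy declared in
+        # transports/base.py; explicit `retry=`/`timeout=` arguments passed to
+        # this method override those defaults.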
+ rpc = self._transport._wrapped_methods[self._transport.batch_create_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_session(self, + request: Optional[Union[spanner.GetSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.Session: + r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_get_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_session(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): + The request object. The request for + [GetSession][google.spanner.v1.Spanner.GetSession]. + name (str): + Required. The name of the session to + retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Session: + A session in the Cloud Spanner API. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.GetSessionRequest): + request = spanner.GetSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
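+ # (Because `spanner.GetSessionRequest(request)` above also coerces a
+ # dict, a hypothetical caller may pass, for example,
+ #   request={"name": "projects/p/instances/i/databases/d/sessions/s"}
+ # in which case the field assignments below are skipped.)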
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_sessions(self, + request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSessionsPager: + r"""Lists all sessions in a given database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_list_sessions(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ListSessionsRequest( + database="database_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): + The request object. The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + database (str): + Required. The database in which to + list sessions. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager: + The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
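+ # (The value ultimately returned below is a ListSessionsPager, so a
+ # hypothetical consumer, with `db` as a placeholder database name,
+ # can iterate item by item or page by page:
+ #
+ #   for session in client.list_sessions(database=db):
+ #       ...
+ #   for page in client.list_sessions(database=db).pages:
+ #       ...  # each `page` is a full ListSessionsResponse
+ # )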
+ if not isinstance(request, spanner.ListSessionsRequest): + request = spanner.ListSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSessionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_session(self, + request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Ends a session, releasing server resources associated + with it. This asynchronously triggers the cancellation + of any operations that are running with this session. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_delete_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + client.delete_session(request=request) + + Args: + request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): + The request object. The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + name (str): + Required. The name of the session to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
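+ # (The check below is equivalent to
+ #   has_flattened_params = any(param is not None for param in [name])
+ # i.e. it only asks whether *any* flattened field was supplied.)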
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.DeleteSessionRequest): + request = spanner.DeleteSessionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_session] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def execute_sql(self, + request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> result_set.ResultSet: + r"""Executes an SQL statement, returning all results in a single + reply. This method can't be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + The query string can be SQL or `Graph Query Language + (GQL) `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_execute_sql(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = client.execute_sql(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
+ Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_v1.types.ResultSet:
+ Results from [Read][google.spanner.v1.Spanner.Read] or
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner.ExecuteSqlRequest):
+ request = spanner.ExecuteSqlRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.execute_sql]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("session", request.session),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def execute_streaming_sql(self,
+ request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> Iterable[result_set.PartialResultSet]:
+ r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except
+ returns the result set as a stream. Unlike
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no
+ limit on the size of the returned result set. However, no
+ individual row in the result set can exceed 100 MiB, and no
+ column value can exceed 10 MiB.
+
+ The query string can be SQL or `Graph Query Language
+ (GQL) `__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_v1
+
+ def sample_execute_streaming_sql():
+ # Create a client
+ client = spanner_v1.SpannerClient()
+
+ # Initialize request argument(s)
+ request = spanner_v1.ExecuteSqlRequest(
+ session="session_value",
+ sql="sql_value",
+ )
+
+ # Make the request
+ stream = client.execute_streaming_sql(request=request)
+
+ # Handle the response
+ for response in stream:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]):
+ The request object. The request for
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and
+ [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ + Returns: + Iterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ExecuteSqlRequest): + request = spanner.ExecuteSqlRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.execute_streaming_sql] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def execute_batch_dml(self, + request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_execute_batch_dml(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + statements = spanner_v1.Statement() + statements.sql = "sql_value" + + request = spanner_v1.ExecuteBatchDmlRequest( + session="session_value", + statements=statements, + seqno=550, + ) + + # Make the request + response = client.execute_batch_dml(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
+ Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_v1.types.ExecuteBatchDmlResponse:
+ The response for
+ [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml].
+ Contains a list of
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ one for each DML statement that has successfully
+ executed, in the same order as the statements in the
+ request. If a statement fails, the status in the
+ response body identifies the cause of the failure.
+
+ To check for DML statements that failed, use the
+ following approach:
+
+ 1. Check the status in the response message. The
+ [google.rpc.Code][google.rpc.Code] enum value OK
+ indicates that all statements were executed
+ successfully. 2. If the status was not OK, check the
+ number of result sets in the response. If the
+ response contains N
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ then statement N+1 in the request failed.
+
+ Example 1:
+
+ - Request: 5 DML statements, all executed
+ successfully.
+
+ \* Response: 5
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ with the status OK.
+
+ Example 2:
+
+ - Request: 5 DML statements. The third statement has
+ a syntax error.
+
+ \* Response: 2
+ [ResultSet][google.spanner.v1.ResultSet] messages,
+ and a syntax error (INVALID_ARGUMENT) status. The
+ number of [ResultSet][google.spanner.v1.ResultSet]
+ messages indicates that the third statement failed,
+ and the fourth and fifth statements were not
+ executed.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner.ExecuteBatchDmlRequest):
+ request = spanner.ExecuteBatchDmlRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.execute_batch_dml]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("session", request.session),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ # (A hypothetical caller would apply the failure-checking recipe from
+ # the docstring above, e.g.:
+ #
+ #   from google.rpc import code_pb2
+ #   if response.status.code != code_pb2.OK:
+ #       # statement len(response.result_sets) + 1 failed
+ #       ...
+ # )
+ return response
+
+ def read(self,
+ request: Optional[Union[spanner.ReadRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> result_set.ResultSet:
+ r"""Reads rows from the database using key lookups and scans, as a
+ simple key/value style alternative to
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method
+ can't be used to return a result set larger than 10 MiB; if the
+ read matches more data than that, the read fails with a
+ ``FAILED_PRECONDITION`` error.
+
+ Reads inside read-write transactions might return ``ABORTED``.
+ If this occurs, the application should restart the transaction
+ from the beginning. See
+ [Transaction][google.spanner.v1.Transaction] for more details.
+
+ Larger result sets can be yielded in streaming fashion by
+ calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead]
+ instead.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_v1
+
+ def sample_read():
+ # Create a client
+ client = spanner_v1.SpannerClient()
+
+ # Initialize request argument(s)
+ request = spanner_v1.ReadRequest(
+ session="session_value",
+ table="table_value",
+ columns=['columns_value1', 'columns_value2'],
+ )
+
+ # Make the request
+ response = client.read(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]):
+ The request object. The request for [Read][google.spanner.v1.Spanner.Read]
+ and
+ [StreamingRead][google.spanner.v1.Spanner.StreamingRead].
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_v1.types.ResultSet:
+ Results from [Read][google.spanner.v1.Spanner.Read] or
+ [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner.ReadRequest):
+ request = spanner.ReadRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.read]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("session", request.session),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def streaming_read(self,
+ request: Optional[Union[spanner.ReadRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> Iterable[result_set.PartialResultSet]:
+ r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the
+ result set as a stream. Unlike
+ [Read][google.spanner.v1.Spanner.Read], there is no limit on the
+ size of the returned result set. However, no individual row in
+ the result set can exceed 100 MiB, and no column value can
+ exceed 10 MiB.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = client.streaming_read(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.cloud.spanner_v1.types.PartialResultSet]: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.ReadRequest): + request = spanner.ReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_read] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def begin_transaction(self, + request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, + *, + session: Optional[str] = None, + options: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> transaction.Transaction: + r"""Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_begin_transaction(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.BeginTransactionRequest( + session="session_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + session (str): + Required. The session in which the + transaction runs. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + options (google.cloud.spanner_v1.types.TransactionOptions): + Required. Options for the new + transaction. + + This corresponds to the ``options`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.Transaction: + A transaction. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, options] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BeginTransactionRequest): + request = spanner.BeginTransactionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if options is not None: + request.options = options + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.begin_transaction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
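+ # (The returned Transaction carries a server-assigned `id`; a
+ # hypothetical follow-up call would thread it through a
+ # TransactionSelector, with placeholder names throughout:
+ #
+ #   txn = client.begin_transaction(session=session_name, options=opts)
+ #   client.execute_sql(request={"session": session_name,
+ #                               "sql": "SELECT 1",
+ #                               "transaction": {"id": txn.id}})
+ # )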
+ return response + + def commit(self, + request: Optional[Union[spanner.CommitRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + mutations: Optional[MutableSequence[mutation.Mutation]] = None, + single_use_transaction: Optional[transaction.TransactionOptions] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> commit_response.CommitResponse: + r"""Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_commit(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.CommitRequest( + transaction_id=b'transaction_id_blob', + session="session_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + session (str): + Required. The session in which the + transaction to be committed is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (bytes): + Commit a previously-started + transaction. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): + The mutations to be executed when + this transaction commits. All mutations + are applied atomically, in the order + they appear in this list. + + This corresponds to the ``mutations`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): + Execute mutations in a temporary transaction. Note that + unlike commit of a previously-started transaction, + commit with a temporary transaction is non-idempotent. + That is, if the ``CommitRequest`` is sent to Cloud + Spanner more than once (for instance, due to retries in + the application, or in the transport library), it's + possible that the mutations are executed more than once. 
+ If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + + This corresponds to the ``single_use_transaction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id, mutations, single_use_transaction] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.CommitRequest): + request = spanner.CommitRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + if mutations is not None: + request.mutations = mutations + if single_use_transaction is not None: + request.single_use_transaction = single_use_transaction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def rollback(self, + request: Optional[Union[spanner.RollbackRequest, dict]] = None, + *, + session: Optional[str] = None, + transaction_id: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_rollback(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + client.rollback(request=request) + + Args: + request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): + The request object. The request for + [Rollback][google.spanner.v1.Spanner.Rollback]. + session (str): + Required. The session in which the + transaction to roll back is running. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transaction_id (bytes): + Required. The transaction to roll + back. + + This corresponds to the ``transaction_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, transaction_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.RollbackRequest): + request = spanner.RollbackRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if transaction_id is not None: + request.transaction_id = transaction_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def partition_query(self, + request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + query operation in parallel. 
+ Each of the returned partition
+ tokens can be used by
+ [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]
+ to specify a subset of the query result to read. The same
+ session and read-only transaction must be used by the
+ ``PartitionQueryRequest`` used to create the partition tokens
+ and the ``ExecuteSqlRequests`` that use the partition tokens.
+
+ Partition tokens become invalid when the session used to create
+ them is deleted, is idle for too long, begins a new transaction,
+ or becomes too old. When any of these happen, it isn't possible
+ to resume the query, and the whole operation must be restarted
+ from the beginning.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_v1
+
+ def sample_partition_query():
+ # Create a client
+ client = spanner_v1.SpannerClient()
+
+ # Initialize request argument(s)
+ request = spanner_v1.PartitionQueryRequest(
+ session="session_value",
+ sql="sql_value",
+ )
+
+ # Make the request
+ response = client.partition_query(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]):
+ The request object. The request for
+ [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery]
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_v1.types.PartitionResponse:
+ The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery]
+ or
+ [PartitionRead][google.spanner.v1.Spanner.PartitionRead]
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner.PartitionQueryRequest):
+ request = spanner.PartitionQueryRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.partition_query]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("session", request.session),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
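+ # (Each returned partition token is meant to be fed back through
+ # ExecuteStreamingSql with the same session and transaction; a
+ # hypothetical fan-out sketch, with placeholder names throughout:
+ #
+ #   resp = client.partition_query(request={"session": session_name,
+ #                                          "sql": sql,
+ #                                          "transaction": txn_selector})
+ #   for part in resp.partitions:
+ #       for chunk in client.execute_streaming_sql(
+ #               request={"session": session_name, "sql": sql,
+ #                        "transaction": txn_selector,
+ #                        "partition_token": part.partition_token}):
+ #           ...  # consume PartialResultSet chunks
+ # )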
+ return response + + def partition_read(self, + request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner.PartitionResponse: + r"""Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = client.partition_read(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): + The request object. The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_v1.types.PartitionResponse: + The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.PartitionReadRequest): + request = spanner.PartitionReadRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.partition_read] + + # Certain fields should be provided within the metadata header; + # add these here. 
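+ # The routing header appended below serializes the resource name into
+ # the request metadata; conceptually it adds a pair along the lines of
+ #   ("x-goog-request-params", "session=<url-encoded session name>")
+ # (the exact encoding is handled by gapic_v1.routing_header).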
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_write(self, + request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, + *, + session: Optional[str] = None, + mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[spanner.BatchWriteResponse]: + r"""Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_v1 + + def sample_batch_write(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + mutation_groups = spanner_v1.MutationGroup() + mutation_groups.mutations.insert.table = "table_value" + + request = spanner_v1.BatchWriteRequest( + session="session_value", + mutation_groups=mutation_groups, + ) + + # Make the request + stream = client.batch_write(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + session (str): + Required. The session in which the + batch request is to be run. + + This corresponds to the ``session`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): + Required. The groups of mutations to + be applied. + + This corresponds to the ``mutation_groups`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]: + The result of applying a batch of + mutations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [session, mutation_groups] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner.BatchWriteRequest): + request = spanner.BatchWriteRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if session is not None: + request.session = session + if mutation_groups is not None: + request.mutation_groups = mutation_groups + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_write] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("session", request.session), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SpannerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "SpannerClient", +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py new file mode 100644 index 0000000000..0969af8f6f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_v1.types import spanner + + +class ListSessionsPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner.ListSessionsResponse], + request: spanner.ListSessionsRequest, + response: spanner.ListSessionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.spanner_v1.types.ListSessionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = spanner.ListSessionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner.Session]: + for page in self.pages: + yield from page.sessions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSessionsAsyncPager: + """A pager for iterating through ``list_sessions`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner.ListSessionsResponse]], + request: spanner.ListSessionsRequest, + response: spanner.ListSessionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.spanner_v1.types.ListSessionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = spanner.ListSessionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner.Session]: + async def async_generator(): + async for page in self.pages: + for response in page.sessions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst new file mode 100644 index 0000000000..99997401d5 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SpannerTransport` is the ABC for all transports. +- public child `SpannerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SpannerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSpannerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `SpannerRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py new file mode 100644 index 0000000000..ee0b1ae37f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SpannerTransport +from .grpc import SpannerGrpcTransport +from .grpc_asyncio import SpannerGrpcAsyncIOTransport +from .rest import SpannerRestTransport +from .rest import SpannerRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] +_transport_registry['grpc'] = SpannerGrpcTransport +_transport_registry['grpc_asyncio'] = SpannerGrpcAsyncIOTransport +_transport_registry['rest'] = SpannerRestTransport + +__all__ = ( + 'SpannerTransport', + 'SpannerGrpcTransport', + 'SpannerGrpcAsyncIOTransport', + 'SpannerRestTransport', + 'SpannerRestInterceptor', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py new file mode 100644 index 0000000000..0c7b14364b --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py @@ -0,0 +1,505 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
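(Editorial aside; not part of the generated patch.) The ``_transport_registry`` in ``transports/__init__.py`` above maps the labels ``grpc``, ``grpc_asyncio``, and ``rest`` to their transport classes, and the generated ``SpannerClient`` accepts such a label through its ``transport`` argument. A minimal sketch of selecting a transport by label, assuming the generated package is installed and application default credentials are available; the helper name ``make_client`` is ours:

# Editorial sketch: choose a transport by registry label ("grpc" is the default).
from google.cloud import spanner_v1

def make_client(transport_label: str = "grpc") -> spanner_v1.SpannerClient:
    # "grpc" resolves to SpannerGrpcTransport and "rest" to SpannerRestTransport
    # via the registry compiled in transports/__init__.py above.
    return spanner_v1.SpannerClient(transport=transport_label)

client = make_client("rest")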
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.spanner_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SpannerTransport(abc.ABC): + """Abstract transport class for Spanner.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
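+        # (Editorial note) Resolution order in the block below: passing both +        # `credentials` and `credentials_file` raises DuplicateCredentialArgs; an +        # explicit `credentials_file` is loaded with +        # google.auth.load_credentials_from_file, and otherwise application +        # default credentials are obtained from google.auth.default().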
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_session: gapic_v1.method.wrap_method( + self.create_session, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_create_sessions: gapic_v1.method.wrap_method( + self.batch_create_sessions, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_session: gapic_v1.method.wrap_method( + self.get_session, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_sessions: gapic_v1.method.wrap_method( + self.list_sessions, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_session: gapic_v1.method.wrap_method( + self.delete_session, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_sql: gapic_v1.method.wrap_method( + self.execute_sql, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_streaming_sql: gapic_v1.method.wrap_method( + 
self.execute_streaming_sql, + default_timeout=3600.0, + client_info=client_info, + ), + self.execute_batch_dml: gapic_v1.method.wrap_method( + self.execute_batch_dml, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.read: gapic_v1.method.wrap_method( + self.read, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.streaming_read: gapic_v1.method.wrap_method( + self.streaming_read, + default_timeout=3600.0, + client_info=client_info, + ), + self.begin_transaction: gapic_v1.method.wrap_method( + self.begin_transaction, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.commit: gapic_v1.method.wrap_method( + self.commit, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.rollback: gapic_v1.method.wrap_method( + self.rollback, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_query: gapic_v1.method.wrap_method( + self.partition_query, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_read: gapic_v1.method.wrap_method( + self.partition_read, + default_retry=retries.Retry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_write: gapic_v1.method.wrap_method( + self.batch_write, + default_timeout=3600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + Union[ + spanner.Session, + Awaitable[spanner.Session] + ]]: + raise NotImplementedError() + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + Union[ + spanner.BatchCreateSessionsResponse, + Awaitable[spanner.BatchCreateSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + Union[ + spanner.Session, + Awaitable[spanner.Session] + ]]: + raise NotImplementedError() + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + Union[ + spanner.ListSessionsResponse, + Awaitable[spanner.ListSessionsResponse] + ]]: + raise NotImplementedError() + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Union[ + result_set.ResultSet, + Awaitable[result_set.ResultSet] + ]]: + raise NotImplementedError() + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Union[ + result_set.PartialResultSet, + Awaitable[result_set.PartialResultSet] + ]]: + raise NotImplementedError() + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + Union[ + spanner.ExecuteBatchDmlResponse, + Awaitable[spanner.ExecuteBatchDmlResponse] + ]]: + raise NotImplementedError() + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + Union[ + result_set.ResultSet, + Awaitable[result_set.ResultSet] + ]]: + raise NotImplementedError() + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + Union[ + result_set.PartialResultSet, + Awaitable[result_set.PartialResultSet] + ]]: + raise NotImplementedError() + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + Union[ + transaction.Transaction, + Awaitable[transaction.Transaction] + ]]: + raise NotImplementedError() + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + Union[ + commit_response.CommitResponse, + Awaitable[commit_response.CommitResponse] + ]]: + raise NotImplementedError() + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + Union[ + spanner.PartitionResponse, + Awaitable[spanner.PartitionResponse] + ]]: + raise NotImplementedError() + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + Union[ + spanner.PartitionResponse, + Awaitable[spanner.PartitionResponse] + ]]: + raise NotImplementedError() + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + Union[ + spanner.BatchWriteResponse, + Awaitable[spanner.BatchWriteResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'SpannerTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py new file mode 100644 index 0000000000..064199f57e --- /dev/null +++ 
b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py @@ -0,0 +1,899 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore +from .base import SpannerTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata to a list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+                grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SpannerGrpcTransport(SpannerTransport): + """gRPC backend transport for Spanner. + + Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel.
It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + spanner.Session]: + r"""Return a callable for the create session method over gRPC. + + Creates a new session. A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + Returns: + Callable[[~.CreateSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
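+        # (Editorial note) The stub is built on first property access and cached +        # in self._stubs, so each RPC reuses a single stub per channel.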
+ if 'create_session' not in self._stubs: + self._stubs['create_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/CreateSession', + request_serializer=spanner.CreateSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['create_session'] + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + spanner.BatchCreateSessionsResponse]: + r"""Return a callable for the batch create sessions method over gRPC. + + Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + Returns: + Callable[[~.BatchCreateSessionsRequest], + ~.BatchCreateSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_sessions' not in self._stubs: + self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BatchCreateSessions', + request_serializer=spanner.BatchCreateSessionsRequest.serialize, + response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, + ) + return self._stubs['batch_create_sessions'] + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + spanner.Session]: + r"""Return a callable for the get session method over gRPC. + + Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + Returns: + Callable[[~.GetSessionRequest], + ~.Session]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_session' not in self._stubs: + self._stubs['get_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/GetSession', + request_serializer=spanner.GetSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['get_session'] + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + spanner.ListSessionsResponse]: + r"""Return a callable for the list sessions method over gRPC. + + Lists all sessions in a given database. + + Returns: + Callable[[~.ListSessionsRequest], + ~.ListSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ListSessions', + request_serializer=spanner.ListSessionsRequest.serialize, + response_deserializer=spanner.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete session method over gRPC. + + Ends a session, releasing server resources associated + with it. 
This asynchronously triggers the cancellation + of any operations that are running with this session. + + Returns: + Callable[[~.DeleteSessionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_session' not in self._stubs: + self._stubs['delete_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/DeleteSession', + request_serializer=spanner.DeleteSessionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_session'] + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.ResultSet]: + r"""Return a callable for the execute sql method over gRPC. + + Executes an SQL statement, returning all results in a single + reply. This method can't be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + The query string can be SQL or Graph Query Language + (GQL). + + Returns: + Callable[[~.ExecuteSqlRequest], + ~.ResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_sql' not in self._stubs: + self._stubs['execute_sql'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['execute_sql'] + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.PartialResultSet]: + r"""Return a callable for the execute streaming sql method over gRPC. + + Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + The query string can be SQL or Graph Query Language + (GQL). + + Returns: + Callable[[~.ExecuteSqlRequest], + ~.PartialResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
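+        # (Editorial note) ExecuteStreamingSql is a server-streaming RPC, so it +        # is registered with unary_stream rather than the unary_unary helper +        # used for the non-streaming methods above.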
+ if 'execute_streaming_sql' not in self._stubs: + self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/ExecuteStreamingSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['execute_streaming_sql'] + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + spanner.ExecuteBatchDmlResponse]: + r"""Return a callable for the execute batch dml method over gRPC. + + Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + Returns: + Callable[[~.ExecuteBatchDmlRequest], + ~.ExecuteBatchDmlResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_batch_dml' not in self._stubs: + self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteBatchDml', + request_serializer=spanner.ExecuteBatchDmlRequest.serialize, + response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, + ) + return self._stubs['execute_batch_dml'] + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + result_set.ResultSet]: + r"""Return a callable for the read method over gRPC. + + Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method + can't be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Returns: + Callable[[~.ReadRequest], + ~.ResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'read' not in self._stubs: + self._stubs['read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Read', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['read'] + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + result_set.PartialResultSet]: + r"""Return a callable for the streaming read method over gRPC. 
+ + Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Returns: + Callable[[~.ReadRequest], + ~.PartialResultSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'streaming_read' not in self._stubs: + self._stubs['streaming_read'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/StreamingRead', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['streaming_read'] + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + transaction.Transaction]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Returns: + Callable[[~.BeginTransactionRequest], + ~.Transaction]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'begin_transaction' not in self._stubs: + self._stubs['begin_transaction'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BeginTransaction', + request_serializer=spanner.BeginTransactionRequest.serialize, + response_deserializer=transaction.Transaction.deserialize, + ) + return self._stubs['begin_transaction'] + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + commit_response.CommitResponse]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Returns: + Callable[[~.CommitRequest], + ~.CommitResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
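+        # (Editorial note) The default retry configured in base.py covers only +        # RESOURCE_EXHAUSTED and UNAVAILABLE; an ABORTED commit, as the docstring +        # above explains, must be retried by the application from the beginning +        # of the transaction.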
+ if 'commit' not in self._stubs: + self._stubs['commit'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Commit', + request_serializer=spanner.CommitRequest.serialize, + response_deserializer=commit_response.CommitResponse.deserialize, + ) + return self._stubs['commit'] + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + empty_pb2.Empty]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + Returns: + Callable[[~.RollbackRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback' not in self._stubs: + self._stubs['rollback'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Rollback', + request_serializer=spanner.RollbackRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['rollback'] + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + spanner.PartitionResponse]: + r"""Return a callable for the partition query method over gRPC. + + Creates a set of partition tokens that can be used to execute a + query operation in parallel. Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + ``PartitionQueryRequest`` used to create the partition tokens + and the ``ExecuteSqlRequests`` that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionQueryRequest], + ~.PartitionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_query' not in self._stubs: + self._stubs['partition_query'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionQuery', + request_serializer=spanner.PartitionQueryRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_query'] + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + spanner.PartitionResponse]: + r"""Return a callable for the partition read method over gRPC. + + Creates a set of partition tokens that can be used to execute a + read operation in parallel. 
Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionReadRequest], + ~.PartitionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_read' not in self._stubs: + self._stubs['partition_read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionRead', + request_serializer=spanner.PartitionReadRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_read'] + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + spanner.BatchWriteResponse]: + r"""Return a callable for the batch write method over gRPC. + + Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + Returns: + Callable[[~.BatchWriteRequest], + ~.BatchWriteResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
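+        # (Editorial note) BatchWrite streams BatchWriteResponse messages as +        # mutation groups are applied, which is why it is registered with +        # unary_stream below.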
+ if 'batch_write' not in self._stubs: + self._stubs['batch_write'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/BatchWrite', + request_serializer=spanner.BatchWriteRequest.serialize, + response_deserializer=spanner.BatchWriteResponse.deserialize, + ) + return self._stubs['batch_write'] + + def close(self): + self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'SpannerGrpcTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py new file mode 100644 index 0000000000..3efcf7071f --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py @@ -0,0 +1,1125 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore +from .base import SpannerTransport, DEFAULT_CLIENT_INFO +from .grpc import SpannerGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + 
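+ # The comprehension above decodes bytes metadata values so the + # logged mapping is JSON-serializable.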
} + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.spanner.v1.Spanner", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert the gRPC trailing metadata to a dict of strings for logging + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.spanner.v1.Spanner", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SpannerGrpcAsyncIOTransport(SpannerTransport): + """gRPC AsyncIO backend transport for Spanner. + + Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object.
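+ + Example (a minimal sketch; assumes application default credentials + are available in the environment):: + + channel = SpannerGrpcAsyncIOTransport.create_channel()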
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + Awaitable[spanner.Session]]: + r"""Return a callable for the create session method over gRPC. + + Creates a new session. 
A session can be used to perform + transactions that read and/or modify data in a Cloud Spanner + database. Sessions are meant to be reused for many consecutive + transactions. + + Sessions can only execute one transaction at a time. To execute + multiple concurrent read-write/write-only transactions, create + multiple sessions. Note that standalone reads and queries use a + transaction internally, and count toward the one transaction + limit. + + Active sessions use additional server resources, so it's a good + idea to delete idle and unneeded sessions. Aside from explicit + deletes, Cloud Spanner can delete sessions when no operations + are sent for more than an hour. If a session is deleted, + requests to it return ``NOT_FOUND``. + + Idle sessions can be kept alive by sending a trivial SQL query + periodically, for example, ``"SELECT 1"``. + + Returns: + Callable[[~.CreateSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_session' not in self._stubs: + self._stubs['create_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/CreateSession', + request_serializer=spanner.CreateSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['create_session'] + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + Awaitable[spanner.BatchCreateSessionsResponse]]: + r"""Return a callable for the batch create sessions method over gRPC. + + Creates multiple new sessions. + + This API can be used to initialize a session cache on + the clients. See https://goo.gl/TgSFN2 for best + practices on session cache management. + + Returns: + Callable[[~.BatchCreateSessionsRequest], + Awaitable[~.BatchCreateSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_create_sessions' not in self._stubs: + self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BatchCreateSessions', + request_serializer=spanner.BatchCreateSessionsRequest.serialize, + response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, + ) + return self._stubs['batch_create_sessions'] + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + Awaitable[spanner.Session]]: + r"""Return a callable for the get session method over gRPC. + + Gets a session. Returns ``NOT_FOUND`` if the session doesn't + exist. This is mainly useful for determining whether a session + is still alive. + + Returns: + Callable[[~.GetSessionRequest], + Awaitable[~.Session]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
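+ # For illustration only (assumes a running event loop and an existing + # session name), the returned callable is awaited directly: + # + # session = await transport.get_session( + # spanner.GetSessionRequest(name=session_name))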
+ if 'get_session' not in self._stubs: + self._stubs['get_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/GetSession', + request_serializer=spanner.GetSessionRequest.serialize, + response_deserializer=spanner.Session.deserialize, + ) + return self._stubs['get_session'] + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + Awaitable[spanner.ListSessionsResponse]]: + r"""Return a callable for the list sessions method over gRPC. + + Lists all sessions in a given database. + + Returns: + Callable[[~.ListSessionsRequest], + Awaitable[~.ListSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sessions' not in self._stubs: + self._stubs['list_sessions'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ListSessions', + request_serializer=spanner.ListSessionsRequest.serialize, + response_deserializer=spanner.ListSessionsResponse.deserialize, + ) + return self._stubs['list_sessions'] + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete session method over gRPC. + + Ends a session, releasing server resources associated + with it. This asynchronously triggers the cancellation + of any operations that are running with this session. + + Returns: + Callable[[~.DeleteSessionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_session' not in self._stubs: + self._stubs['delete_session'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/DeleteSession', + request_serializer=spanner.DeleteSessionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_session'] + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Awaitable[result_set.ResultSet]]: + r"""Return a callable for the execute sql method over gRPC. + + Executes an SQL statement, returning all results in a single + reply. This method can't be used to return a result set larger + than 10 MiB; if the query yields more data than that, the query + fails with a ``FAILED_PRECONDITION`` error. + + Operations inside read-write transactions might return + ``ABORTED``. If this occurs, the application should restart the + transaction from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be fetched in streaming fashion by + calling + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + instead. + + The query string can be SQL or Graph Query Language (GQL). + + Returns: + Callable[[~.ExecuteSqlRequest], + Awaitable[~.ResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
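+ # A usage sketch (``session_name`` is an assumption): awaiting the + # callable returns a fully buffered result_set.ResultSet: + # + # result = await transport.execute_sql(spanner.ExecuteSqlRequest( + # session=session_name, sql='SELECT 1'))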
+ if 'execute_sql' not in self._stubs: + self._stubs['execute_sql'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['execute_sql'] + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + Awaitable[result_set.PartialResultSet]]: + r"""Return a callable for the execute streaming sql method over gRPC. + + Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except + returns the result set as a stream. Unlike + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no + limit on the size of the returned result set. However, no + individual row in the result set can exceed 100 MiB, and no + column value can exceed 10 MiB. + + The query string can be SQL or Graph Query Language (GQL). + + Returns: + Callable[[~.ExecuteSqlRequest], + Awaitable[~.PartialResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_streaming_sql' not in self._stubs: + self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/ExecuteStreamingSql', + request_serializer=spanner.ExecuteSqlRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['execute_streaming_sql'] + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + Awaitable[spanner.ExecuteBatchDmlResponse]]: + r"""Return a callable for the execute batch dml method over gRPC. + + Executes a batch of SQL DML statements. This method allows many + statements to be run with lower latency than submitting them + sequentially with + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Statements are executed in sequential order. A request can + succeed even if a statement fails. The + [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] + field in the response provides information about the statement + that failed. Clients must inspect this field to determine + whether an error occurred. + + Execution stops after the first failed statement; the remaining + statements are not executed. + + Returns: + Callable[[~.ExecuteBatchDmlRequest], + Awaitable[~.ExecuteBatchDmlResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'execute_batch_dml' not in self._stubs: + self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/ExecuteBatchDml', + request_serializer=spanner.ExecuteBatchDmlRequest.serialize, + response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, + ) + return self._stubs['execute_batch_dml'] + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + Awaitable[result_set.ResultSet]]: + r"""Return a callable for the read method over gRPC. + + Reads rows from the database using key lookups and scans, as a + simple key/value style alternative to + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql].
This method + can't be used to return a result set larger than 10 MiB; if the + read matches more data than that, the read fails with a + ``FAILED_PRECONDITION`` error. + + Reads inside read-write transactions might return ``ABORTED``. + If this occurs, the application should restart the transaction + from the beginning. See + [Transaction][google.spanner.v1.Transaction] for more details. + + Larger result sets can be yielded in streaming fashion by + calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] + instead. + + Returns: + Callable[[~.ReadRequest], + Awaitable[~.ResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'read' not in self._stubs: + self._stubs['read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Read', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.ResultSet.deserialize, + ) + return self._stubs['read'] + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + Awaitable[result_set.PartialResultSet]]: + r"""Return a callable for the streaming read method over gRPC. + + Like [Read][google.spanner.v1.Spanner.Read], except returns the + result set as a stream. Unlike + [Read][google.spanner.v1.Spanner.Read], there is no limit on the + size of the returned result set. However, no individual row in + the result set can exceed 100 MiB, and no column value can + exceed 10 MiB. + + Returns: + Callable[[~.ReadRequest], + Awaitable[~.PartialResultSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'streaming_read' not in self._stubs: + self._stubs['streaming_read'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/StreamingRead', + request_serializer=spanner.ReadRequest.serialize, + response_deserializer=result_set.PartialResultSet.deserialize, + ) + return self._stubs['streaming_read'] + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + Awaitable[transaction.Transaction]]: + r"""Return a callable for the begin transaction method over gRPC. + + Begins a new transaction. This step can often be skipped: + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [Commit][google.spanner.v1.Spanner.Commit] can begin a new + transaction as a side-effect. + + Returns: + Callable[[~.BeginTransactionRequest], + Awaitable[~.Transaction]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
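+ # Illustrative only (``session_name`` and ``options`` are assumptions): + # + # txn = await transport.begin_transaction( + # spanner.BeginTransactionRequest( + # session=session_name, options=options))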
+ if 'begin_transaction' not in self._stubs: + self._stubs['begin_transaction'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/BeginTransaction', + request_serializer=spanner.BeginTransactionRequest.serialize, + response_deserializer=transaction.Transaction.deserialize, + ) + return self._stubs['begin_transaction'] + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + Awaitable[commit_response.CommitResponse]]: + r"""Return a callable for the commit method over gRPC. + + Commits a transaction. The request includes the mutations to be + applied to rows in the database. + + ``Commit`` might return an ``ABORTED`` error. This can occur at + any time; commonly, the cause is conflicts with concurrent + transactions. However, it can also happen for a variety of other + reasons. If ``Commit`` returns ``ABORTED``, the caller should + retry the transaction from the beginning, reusing the same + session. + + On very rare occasions, ``Commit`` might return ``UNKNOWN``. + This can happen, for example, if the client job experiences a 1+ + hour networking failure. At that point, Cloud Spanner has lost + track of the transaction outcome and we recommend that you + perform another read from the database to see the state of + things as they are now. + + Returns: + Callable[[~.CommitRequest], + Awaitable[~.CommitResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'commit' not in self._stubs: + self._stubs['commit'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Commit', + request_serializer=spanner.CommitRequest.serialize, + response_deserializer=commit_response.CommitResponse.deserialize, + ) + return self._stubs['commit'] + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the rollback method over gRPC. + + Rolls back a transaction, releasing any locks it holds. It's a + good idea to call this for any transaction that includes one or + more [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and + ultimately decides not to commit. + + ``Rollback`` returns ``OK`` if it successfully aborts the + transaction, the transaction was already aborted, or the + transaction isn't found. ``Rollback`` never returns ``ABORTED``. + + Returns: + Callable[[~.RollbackRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rollback' not in self._stubs: + self._stubs['rollback'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/Rollback', + request_serializer=spanner.RollbackRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['rollback'] + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + Awaitable[spanner.PartitionResponse]]: + r"""Return a callable for the partition query method over gRPC. + + Creates a set of partition tokens that can be used to execute a + query operation in parallel. 
Each of the returned partition + tokens can be used by + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + to specify a subset of the query result to read. The same + session and read-only transaction must be used by the + ``PartitionQueryRequest`` used to create the partition tokens + and the ``ExecuteSqlRequests`` that use the partition tokens. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the query, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionQueryRequest], + Awaitable[~.PartitionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_query' not in self._stubs: + self._stubs['partition_query'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionQuery', + request_serializer=spanner.PartitionQueryRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_query'] + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + Awaitable[spanner.PartitionResponse]]: + r"""Return a callable for the partition read method over gRPC. + + Creates a set of partition tokens that can be used to execute a + read operation in parallel. Each of the returned partition + tokens can be used by + [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to + specify a subset of the read result to read. The same session + and read-only transaction must be used by the + ``PartitionReadRequest`` used to create the partition tokens and + the ``ReadRequests`` that use the partition tokens. There are no + ordering guarantees on rows returned among the returned + partition tokens, or even within each individual + ``StreamingRead`` call issued with a ``partition_token``. + + Partition tokens become invalid when the session used to create + them is deleted, is idle for too long, begins a new transaction, + or becomes too old. When any of these happen, it isn't possible + to resume the read, and the whole operation must be restarted + from the beginning. + + Returns: + Callable[[~.PartitionReadRequest], + Awaitable[~.PartitionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'partition_read' not in self._stubs: + self._stubs['partition_read'] = self._logged_channel.unary_unary( + '/google.spanner.v1.Spanner/PartitionRead', + request_serializer=spanner.PartitionReadRequest.serialize, + response_deserializer=spanner.PartitionResponse.deserialize, + ) + return self._stubs['partition_read'] + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + Awaitable[spanner.BatchWriteResponse]]: + r"""Return a callable for the batch write method over gRPC. + + Batches the supplied mutation groups in a collection of + efficient transactions. All mutations in a group are committed + atomically. 
However, mutations across groups can be committed + non-atomically in an unspecified order and thus, they must be + independent of each other. Partial failure is possible, that is, + some groups might have been committed successfully, while some + might have failed. The results of individual batches are + streamed into the response as the batches are applied. + + ``BatchWrite`` requests are not replay protected, meaning that + each mutation group can be applied more than once. Replays of + non-idempotent mutations can have undesirable effects. For + example, replays of an insert mutation can produce an already + exists error or if you use generated or commit timestamp-based + keys, it can result in additional rows being added to the + mutation's table. We recommend structuring your mutation groups + to be idempotent to avoid this issue. + + Returns: + Callable[[~.BatchWriteRequest], + Awaitable[~.BatchWriteResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_write' not in self._stubs: + self._stubs['batch_write'] = self._logged_channel.unary_stream( + '/google.spanner.v1.Spanner/BatchWrite', + request_serializer=spanner.BatchWriteRequest.serialize, + response_deserializer=spanner.BatchWriteResponse.deserialize, + ) + return self._stubs['batch_write'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_session: self._wrap_method( + self.create_session, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_create_sessions: self._wrap_method( + self.batch_create_sessions, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_session: self._wrap_method( + self.get_session, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.list_sessions: self._wrap_method( + self.list_sessions, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_session: self._wrap_method( + self.delete_session, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_sql: self._wrap_method( + self.execute_sql, + default_retry=retries.AsyncRetry( + initial=0.25, 
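+ # Exponential backoff: the delay starts at ``initial`` seconds and + # is multiplied by ``multiplier`` after each attempt, capped at + # ``maximum``; only ResourceExhausted and ServiceUnavailable + # errors are retried, within the overall ``deadline``.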
+ maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.execute_streaming_sql: self._wrap_method( + self.execute_streaming_sql, + default_timeout=3600.0, + client_info=client_info, + ), + self.execute_batch_dml: self._wrap_method( + self.execute_batch_dml, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.read: self._wrap_method( + self.read, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.streaming_read: self._wrap_method( + self.streaming_read, + default_timeout=3600.0, + client_info=client_info, + ), + self.begin_transaction: self._wrap_method( + self.begin_transaction, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.commit: self._wrap_method( + self.commit, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.rollback: self._wrap_method( + self.rollback, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_query: self._wrap_method( + self.partition_query, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.partition_read: self._wrap_method( + self.partition_read, + default_retry=retries.AsyncRetry( + initial=0.25, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.batch_write: self._wrap_method( + self.batch_write, + default_timeout=3600.0, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + +__all__ = ( + 'SpannerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py 
b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py new file mode 100644 index 0000000000..206ddb9999 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py @@ -0,0 +1,2898 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore + + +from .rest_base import _BaseSpannerRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SpannerRestInterceptor: + """Interceptor for Spanner. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SpannerRestTransport. + + .. 
code-block:: python + class MyCustomSpannerInterceptor(SpannerRestInterceptor): + def pre_batch_create_sessions(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_batch_create_sessions(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_batch_write(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_batch_write(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_begin_transaction(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_begin_transaction(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_commit(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_commit(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_create_session(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_session(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_delete_session(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def pre_execute_batch_dml(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_execute_batch_dml(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_execute_sql(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_execute_sql(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_execute_streaming_sql(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_execute_streaming_sql(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_get_session(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_session(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_sessions(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_sessions(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_partition_query(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_partition_query(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_partition_read(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_partition_read(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_read(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_read(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_rollback(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def pre_streaming_read(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_streaming_read(self, 
response): + logging.info(f"Received response: {response}") + return response + + transport = SpannerRestTransport(interceptor=MyCustomSpannerInterceptor()) + client = SpannerClient(transport=transport) + + + """ + def pre_batch_create_sessions(self, request: spanner.BatchCreateSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for batch_create_sessions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_batch_create_sessions(self, response: spanner.BatchCreateSessionsResponse) -> spanner.BatchCreateSessionsResponse: + """Post-rpc interceptor for batch_create_sessions + + DEPRECATED. Please use the `post_batch_create_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_batch_create_sessions` interceptor runs + before the `post_batch_create_sessions_with_metadata` interceptor. + """ + return response + + def post_batch_create_sessions_with_metadata(self, response: spanner.BatchCreateSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_batch_create_sessions_with_metadata` + interceptor in new development instead of the `post_batch_create_sessions` interceptor. + When both interceptors are used, this `post_batch_create_sessions_with_metadata` interceptor runs after the + `post_batch_create_sessions` interceptor. The (possibly modified) response returned by + `post_batch_create_sessions` will be passed to + `post_batch_create_sessions_with_metadata`. + """ + return response, metadata + + def pre_batch_write(self, request: spanner.BatchWriteRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchWriteRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for batch_write + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_batch_write(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for batch_write + + DEPRECATED. Please use the `post_batch_write_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_batch_write` interceptor runs + before the `post_batch_write_with_metadata` interceptor. + """ + return response + + def post_batch_write_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_write + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. 
+ + We recommend only using this `post_batch_write_with_metadata` + interceptor in new development instead of the `post_batch_write` interceptor. + When both interceptors are used, this `post_batch_write_with_metadata` interceptor runs after the + `post_batch_write` interceptor. The (possibly modified) response returned by + `post_batch_write` will be passed to + `post_batch_write_with_metadata`. + """ + return response, metadata + + def pre_begin_transaction(self, request: spanner.BeginTransactionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for begin_transaction + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_begin_transaction(self, response: transaction.Transaction) -> transaction.Transaction: + """Post-rpc interceptor for begin_transaction + + DEPRECATED. Please use the `post_begin_transaction_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_begin_transaction` interceptor runs + before the `post_begin_transaction_with_metadata` interceptor. + """ + return response + + def post_begin_transaction_with_metadata(self, response: transaction.Transaction, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transaction.Transaction, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for begin_transaction + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_begin_transaction_with_metadata` + interceptor in new development instead of the `post_begin_transaction` interceptor. + When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the + `post_begin_transaction` interceptor. The (possibly modified) response returned by + `post_begin_transaction` will be passed to + `post_begin_transaction_with_metadata`. + """ + return response, metadata + + def pre_commit(self, request: spanner.CommitRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for commit + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_commit(self, response: commit_response.CommitResponse) -> commit_response.CommitResponse: + """Post-rpc interceptor for commit + + DEPRECATED. Please use the `post_commit_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_commit` interceptor runs + before the `post_commit_with_metadata` interceptor. + """ + return response + + def post_commit_with_metadata(self, response: commit_response.CommitResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[commit_response.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for commit + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. 
+ + We recommend only using this `post_commit_with_metadata` + interceptor in new development instead of the `post_commit` interceptor. + When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the + `post_commit` interceptor. The (possibly modified) response returned by + `post_commit` will be passed to + `post_commit_with_metadata`. + """ + return response, metadata + + def pre_create_session(self, request: spanner.CreateSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CreateSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_create_session(self, response: spanner.Session) -> spanner.Session: + """Post-rpc interceptor for create_session + + DEPRECATED. Please use the `post_create_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_create_session` interceptor runs + before the `post_create_session_with_metadata` interceptor. + """ + return response + + def post_create_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_create_session_with_metadata` + interceptor in new development instead of the `post_create_session` interceptor. + When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the + `post_create_session` interceptor. The (possibly modified) response returned by + `post_create_session` will be passed to + `post_create_session_with_metadata`. + """ + return response, metadata + + def pre_delete_session(self, request: spanner.DeleteSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.DeleteSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def pre_execute_batch_dml(self, request: spanner.ExecuteBatchDmlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for execute_batch_dml + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_execute_batch_dml(self, response: spanner.ExecuteBatchDmlResponse) -> spanner.ExecuteBatchDmlResponse: + """Post-rpc interceptor for execute_batch_dml + + DEPRECATED. Please use the `post_execute_batch_dml_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_execute_batch_dml` interceptor runs + before the `post_execute_batch_dml_with_metadata` interceptor. 
+ """ + return response + + def post_execute_batch_dml_with_metadata(self, response: spanner.ExecuteBatchDmlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_batch_dml + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_execute_batch_dml_with_metadata` + interceptor in new development instead of the `post_execute_batch_dml` interceptor. + When both interceptors are used, this `post_execute_batch_dml_with_metadata` interceptor runs after the + `post_execute_batch_dml` interceptor. The (possibly modified) response returned by + `post_execute_batch_dml` will be passed to + `post_execute_batch_dml_with_metadata`. + """ + return response, metadata + + def pre_execute_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for execute_sql + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_execute_sql(self, response: result_set.ResultSet) -> result_set.ResultSet: + """Post-rpc interceptor for execute_sql + + DEPRECATED. Please use the `post_execute_sql_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_execute_sql` interceptor runs + before the `post_execute_sql_with_metadata` interceptor. + """ + return response + + def post_execute_sql_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_sql + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_execute_sql_with_metadata` + interceptor in new development instead of the `post_execute_sql` interceptor. + When both interceptors are used, this `post_execute_sql_with_metadata` interceptor runs after the + `post_execute_sql` interceptor. The (possibly modified) response returned by + `post_execute_sql` will be passed to + `post_execute_sql_with_metadata`. + """ + return response, metadata + + def pre_execute_streaming_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for execute_streaming_sql + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_execute_streaming_sql(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for execute_streaming_sql + + DEPRECATED. Please use the `post_execute_streaming_sql_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. 
This `post_execute_streaming_sql` interceptor runs + before the `post_execute_streaming_sql_with_metadata` interceptor. + """ + return response + + def post_execute_streaming_sql_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_streaming_sql + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_execute_streaming_sql_with_metadata` + interceptor in new development instead of the `post_execute_streaming_sql` interceptor. + When both interceptors are used, this `post_execute_streaming_sql_with_metadata` interceptor runs after the + `post_execute_streaming_sql` interceptor. The (possibly modified) response returned by + `post_execute_streaming_sql` will be passed to + `post_execute_streaming_sql_with_metadata`. + """ + return response, metadata + + def pre_get_session(self, request: spanner.GetSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.GetSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_session + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_get_session(self, response: spanner.Session) -> spanner.Session: + """Post-rpc interceptor for get_session + + DEPRECATED. Please use the `post_get_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_get_session` interceptor runs + before the `post_get_session_with_metadata` interceptor. + """ + return response + + def post_get_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_get_session_with_metadata` + interceptor in new development instead of the `post_get_session` interceptor. + When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the + `post_get_session` interceptor. The (possibly modified) response returned by + `post_get_session` will be passed to + `post_get_session_with_metadata`. + """ + return response, metadata + + def pre_list_sessions(self, request: spanner.ListSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_sessions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_list_sessions(self, response: spanner.ListSessionsResponse) -> spanner.ListSessionsResponse: + """Post-rpc interceptor for list_sessions + + DEPRECATED. Please use the `post_list_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. 
This `post_list_sessions` interceptor runs + before the `post_list_sessions_with_metadata` interceptor. + """ + return response + + def post_list_sessions_with_metadata(self, response: spanner.ListSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_list_sessions_with_metadata` + interceptor in new development instead of the `post_list_sessions` interceptor. + When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the + `post_list_sessions` interceptor. The (possibly modified) response returned by + `post_list_sessions` will be passed to + `post_list_sessions_with_metadata`. + """ + return response, metadata + + def pre_partition_query(self, request: spanner.PartitionQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for partition_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_partition_query(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: + """Post-rpc interceptor for partition_query + + DEPRECATED. Please use the `post_partition_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_partition_query` interceptor runs + before the `post_partition_query_with_metadata` interceptor. + """ + return response + + def post_partition_query_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for partition_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_partition_query_with_metadata` + interceptor in new development instead of the `post_partition_query` interceptor. + When both interceptors are used, this `post_partition_query_with_metadata` interceptor runs after the + `post_partition_query` interceptor. The (possibly modified) response returned by + `post_partition_query` will be passed to + `post_partition_query_with_metadata`. + """ + return response, metadata + + def pre_partition_read(self, request: spanner.PartitionReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for partition_read + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_partition_read(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: + """Post-rpc interceptor for partition_read + + DEPRECATED. Please use the `post_partition_read_with_metadata` + interceptor instead. 
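+
+        A migration sketch (signatures as defined on this class; the bodies are
+        illustrative no-ops)::
+
+            # Deprecated form:
+            def post_partition_read(self, response):
+                return response
+
+            # Preferred form, which additionally receives the response metadata:
+            def post_partition_read_with_metadata(self, response, metadata):
+                return response, metadata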
+ + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_partition_read` interceptor runs + before the `post_partition_read_with_metadata` interceptor. + """ + return response + + def post_partition_read_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for partition_read + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_partition_read_with_metadata` + interceptor in new development instead of the `post_partition_read` interceptor. + When both interceptors are used, this `post_partition_read_with_metadata` interceptor runs after the + `post_partition_read` interceptor. The (possibly modified) response returned by + `post_partition_read` will be passed to + `post_partition_read_with_metadata`. + """ + return response, metadata + + def pre_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for read + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def post_read(self, response: result_set.ResultSet) -> result_set.ResultSet: + """Post-rpc interceptor for read + + DEPRECATED. Please use the `post_read_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_read` interceptor runs + before the `post_read_with_metadata` interceptor. + """ + return response + + def post_read_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for read + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_read_with_metadata` + interceptor in new development instead of the `post_read` interceptor. + When both interceptors are used, this `post_read_with_metadata` interceptor runs after the + `post_read` interceptor. The (possibly modified) response returned by + `post_read` will be passed to + `post_read_with_metadata`. + """ + return response, metadata + + def pre_rollback(self, request: spanner.RollbackRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for rollback + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. + """ + return request, metadata + + def pre_streaming_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for streaming_read + + Override in a subclass to manipulate the request or metadata + before they are sent to the Spanner server. 
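+
+        A minimal sketch of a request-side override (the metadata key and value
+        are illustrative only)::
+
+            class TracingInterceptor(SpannerRestInterceptor):
+                def pre_streaming_read(self, request, metadata):
+                    # Append a custom key/value pair to the outgoing metadata.
+                    metadata = list(metadata) + [("x-example-trace", "trace-id")]
+                    return request, metadata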
+ """ + return request, metadata + + def post_streaming_read(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for streaming_read + + DEPRECATED. Please use the `post_streaming_read_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Spanner server but before + it is returned to user code. This `post_streaming_read` interceptor runs + before the `post_streaming_read_with_metadata` interceptor. + """ + return response + + def post_streaming_read_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for streaming_read + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Spanner server but before it is returned to user code. + + We recommend only using this `post_streaming_read_with_metadata` + interceptor in new development instead of the `post_streaming_read` interceptor. + When both interceptors are used, this `post_streaming_read_with_metadata` interceptor runs after the + `post_streaming_read` interceptor. The (possibly modified) response returned by + `post_streaming_read` will be passed to + `post_streaming_read_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class SpannerRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SpannerRestInterceptor + + +class SpannerRestTransport(_BaseSpannerRestTransport): + """REST backend synchronous transport for Spanner. + + Cloud Spanner API + + The Cloud Spanner API can be used to manage sessions and execute + transactions on data stored in Cloud Spanner databases. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1. + """ + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[SpannerRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SpannerRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchCreateSessions(_BaseSpannerRestTransport._BaseBatchCreateSessions, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.BatchCreateSessions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.BatchCreateSessionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.BatchCreateSessionsResponse: + r"""Call the batch create sessions method over HTTP. + + Args: + request (~.spanner.BatchCreateSessionsRequest): + The request object. The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.BatchCreateSessionsResponse: + The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. 
+ + """ + + http_options = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_http_options() + + request, metadata = self._interceptor.pre_batch_create_sessions(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.BatchCreateSessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchCreateSessions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._BatchCreateSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.BatchCreateSessionsResponse() + pb_resp = spanner.BatchCreateSessionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_batch_create_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_sessions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.BatchCreateSessionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.batch_create_sessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchCreateSessions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BatchWrite(_BaseSpannerRestTransport._BaseBatchWrite, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.BatchWrite") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__(self, + request: spanner.BatchWriteRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the batch write method over HTTP. + + Args: + request (~.spanner.BatchWriteRequest): + The request object. The request for + [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.BatchWriteResponse: + The result of applying a batch of + mutations. + + """ + + http_options = _BaseSpannerRestTransport._BaseBatchWrite._get_http_options() + + request, metadata = self._interceptor.pre_batch_write(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseBatchWrite._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseBatchWrite._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseBatchWrite._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.BatchWrite", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchWrite", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._BatchWrite._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
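+            # Note: the body of this streaming RPC is consumed lazily through the
+            # ResponseIterator constructed below, so only failures visible in the
+            # HTTP status (available once response headers arrive) are raised here.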
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, spanner.BatchWriteResponse) + + resp = self._interceptor.post_batch_write(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_write_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.batch_write", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BatchWrite", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BeginTransaction(_BaseSpannerRestTransport._BaseBeginTransaction, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.BeginTransaction") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.BeginTransactionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> transaction.Transaction: + r"""Call the begin transaction method over HTTP. + + Args: + request (~.spanner.BeginTransactionRequest): + The request object. The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.transaction.Transaction: + A transaction. 
+ """ + + http_options = _BaseSpannerRestTransport._BaseBeginTransaction._get_http_options() + + request, metadata = self._interceptor.pre_begin_transaction(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseBeginTransaction._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseBeginTransaction._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseBeginTransaction._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.BeginTransaction", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BeginTransaction", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._BeginTransaction._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transaction.Transaction() + pb_resp = transaction.Transaction.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_begin_transaction(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_begin_transaction_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = transaction.Transaction.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.begin_transaction", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "BeginTransaction", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Commit(_BaseSpannerRestTransport._BaseCommit, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.Commit") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.CommitRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> commit_response.CommitResponse: + r"""Call the commit method over HTTP. 
+ + Args: + request (~.spanner.CommitRequest): + The request object. The request for + [Commit][google.spanner.v1.Spanner.Commit]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.commit_response.CommitResponse: + The response for + [Commit][google.spanner.v1.Spanner.Commit]. + + """ + + http_options = _BaseSpannerRestTransport._BaseCommit._get_http_options() + + request, metadata = self._interceptor.pre_commit(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseCommit._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseCommit._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseCommit._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.Commit", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Commit", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._Commit._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
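+            # from_http_response inspects the status code and any JSON error body
+            # and returns the matching GoogleAPICallError subclass (for example,
+            # NotFound for 404 or PermissionDenied for 403).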
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = commit_response.CommitResponse() + pb_resp = commit_response.CommitResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_commit(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_commit_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = commit_response.CommitResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.commit", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Commit", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSession(_BaseSpannerRestTransport._BaseCreateSession, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.CreateSession") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.CreateSessionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.Session: + r"""Call the create session method over HTTP. + + Args: + request (~.spanner.CreateSessionRequest): + The request object. The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.Session: + A session in the Cloud Spanner API. 
+ """ + + http_options = _BaseSpannerRestTransport._BaseCreateSession._get_http_options() + + request, metadata = self._interceptor.pre_create_session(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseCreateSession._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseCreateSession._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseCreateSession._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.CreateSession", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "CreateSession", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._CreateSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.Session() + pb_resp = spanner.Session.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.Session.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.create_session", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "CreateSession", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteSession(_BaseSpannerRestTransport._BaseDeleteSession, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.DeleteSession") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner.DeleteSessionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete session method over HTTP. + + Args: + request (~.spanner.DeleteSessionRequest): + The request object. 
The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseSpannerRestTransport._BaseDeleteSession._get_http_options() + + request, metadata = self._interceptor.pre_delete_session(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseDeleteSession._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseDeleteSession._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.DeleteSession", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "DeleteSession", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._DeleteSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExecuteBatchDml(_BaseSpannerRestTransport._BaseExecuteBatchDml, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ExecuteBatchDml") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.ExecuteBatchDmlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.ExecuteBatchDmlResponse: + r"""Call the execute batch dml method over HTTP. + + Args: + request (~.spanner.ExecuteBatchDmlRequest): + The request object. The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.spanner.ExecuteBatchDmlResponse: + The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of + [ResultSet][google.spanner.v1.ResultSet] messages, one + for each DML statement that has successfully executed, + in the same order as the statements in the request. If a + statement fails, the status in the response body + identifies the cause of the failure. + + To check for DML statements that failed, use the + following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value ``OK`` + indicates that all statements were executed + successfully. + 2. If the status was not ``OK``, check the number of + result sets in the response. If the response contains + ``N`` [ResultSet][google.spanner.v1.ResultSet] + messages, then statement ``N+1`` in the request + failed. + + Example 1: + + - Request: 5 DML statements, all executed successfully. + - Response: 5 [ResultSet][google.spanner.v1.ResultSet] + messages, with the status ``OK``. + + Example 2: + + - Request: 5 DML statements. The third statement has a + syntax error. + - Response: 2 [ResultSet][google.spanner.v1.ResultSet] + messages, and a syntax error (``INVALID_ARGUMENT``) + status. The number of + [ResultSet][google.spanner.v1.ResultSet] messages + indicates that the third statement failed, and the + fourth and fifth statements were not executed. + + """ + + http_options = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_http_options() + + request, metadata = self._interceptor.pre_execute_batch_dml(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ExecuteBatchDml", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteBatchDml", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ExecuteBatchDml._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
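+            # Per the ExecuteBatchDml contract described above, a batch whose
+            # statements partially fail still returns HTTP 200 with the failure
+            # recorded in the response's status field, so this check only surfaces
+            # transport- and request-level errors.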
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.ExecuteBatchDmlResponse() + pb_resp = spanner.ExecuteBatchDmlResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_execute_batch_dml(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_batch_dml_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.ExecuteBatchDmlResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.execute_batch_dml", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteBatchDml", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExecuteSql(_BaseSpannerRestTransport._BaseExecuteSql, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ExecuteSql") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.ExecuteSqlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> result_set.ResultSet: + r"""Call the execute sql method over HTTP. + + Args: + request (~.spanner.ExecuteSqlRequest): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
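+
+            Example request (the session name and SQL text are placeholders)::
+
+                spanner.ExecuteSqlRequest(
+                    session="projects/p/instances/i/databases/d/sessions/s",
+                    sql="SELECT 1",
+                )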
+ + """ + + http_options = _BaseSpannerRestTransport._BaseExecuteSql._get_http_options() + + request, metadata = self._interceptor.pre_execute_sql(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseExecuteSql._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseExecuteSql._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseExecuteSql._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ExecuteSql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteSql", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ExecuteSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = result_set.ResultSet() + pb_resp = result_set.ResultSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_execute_sql(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_sql_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = result_set.ResultSet.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.execute_sql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteSql", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExecuteStreamingSql(_BaseSpannerRestTransport._BaseExecuteStreamingSql, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ExecuteStreamingSql") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__(self, + request: spanner.ExecuteSqlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the execute streaming sql method over HTTP. 
+ + Args: + request (~.spanner.ExecuteSqlRequest): + The request object. The request for + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.PartialResultSet: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + + http_options = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_http_options() + + request, metadata = self._interceptor.pre_execute_streaming_sql(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ExecuteStreamingSql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteStreamingSql", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ExecuteStreamingSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
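+            # The wire format for this method is a JSON array of PartialResultSet
+            # messages; the ResponseIterator constructed below parses that array
+            # incrementally as the caller iterates, instead of buffering the
+            # complete result.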
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) + + resp = self._interceptor.post_execute_streaming_sql(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_streaming_sql_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.execute_streaming_sql", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ExecuteStreamingSql", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSession(_BaseSpannerRestTransport._BaseGetSession, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.GetSession") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner.GetSessionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.Session: + r"""Call the get session method over HTTP. + + Args: + request (~.spanner.GetSessionRequest): + The request object. The request for + [GetSession][google.spanner.v1.Spanner.GetSession]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.Session: + A session in the Cloud Spanner API. 
+ """ + + http_options = _BaseSpannerRestTransport._BaseGetSession._get_http_options() + + request, metadata = self._interceptor.pre_get_session(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseGetSession._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseGetSession._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.GetSession", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "GetSession", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._GetSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.Session() + pb_resp = spanner.Session.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.Session.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.get_session", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "GetSession", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSessions(_BaseSpannerRestTransport._BaseListSessions, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.ListSessions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner.ListSessionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.ListSessionsResponse: + r"""Call the list sessions method over HTTP. + + Args: + request (~.spanner.ListSessionsRequest): + The request object. The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.ListSessionsResponse: + The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + """ + + http_options = _BaseSpannerRestTransport._BaseListSessions._get_http_options() + + request, metadata = self._interceptor.pre_list_sessions(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseListSessions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseListSessions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.ListSessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ListSessions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._ListSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
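+            # A paging sketch (illustrative; ``transport`` and ``db`` are
+            # assumed names): the REST transport returns one page per call,
+            # and callers or the GAPIC pagers advance via ``next_page_token``.
+            #
+            #     req = spanner.ListSessionsRequest(database=db)
+            #     while True:
+            #         page = transport.list_sessions(req)
+            #         for session in page.sessions:
+            #             ...
+            #         if not page.next_page_token:
+            #             break
+            #         req.page_token = page.next_page_token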
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.ListSessionsResponse() + pb_resp = spanner.ListSessionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sessions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.ListSessionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.list_sessions", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "ListSessions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _PartitionQuery(_BaseSpannerRestTransport._BasePartitionQuery, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.PartitionQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.PartitionQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.PartitionResponse: + r"""Call the partition query method over HTTP. + + Args: + request (~.spanner.PartitionQueryRequest): + The request object. The request for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.spanner.PartitionResponse: + The response for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + + http_options = _BaseSpannerRestTransport._BasePartitionQuery._get_http_options() + + request, metadata = self._interceptor.pre_partition_query(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BasePartitionQuery._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BasePartitionQuery._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BasePartitionQuery._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.PartitionQuery", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionQuery", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._PartitionQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.PartitionResponse() + pb_resp = spanner.PartitionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_partition_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_partition_query_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.PartitionResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.partition_query", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _PartitionRead(_BaseSpannerRestTransport._BasePartitionRead, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.PartitionRead") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.PartitionReadRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner.PartitionResponse: + r"""Call the partition read method over HTTP. + + Args: + request (~.spanner.PartitionReadRequest): + The request object. The request for + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner.PartitionResponse: + The response for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] + or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + """ + + http_options = _BaseSpannerRestTransport._BasePartitionRead._get_http_options() + + request, metadata = self._interceptor.pre_partition_read(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BasePartitionRead._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BasePartitionRead._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BasePartitionRead._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.PartitionRead", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionRead", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._PartitionRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
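+            # A follow-up sketch (illustrative; ``transport`` and
+            # ``partition_req`` are assumed names): each partition token
+            # returned below is meant to be passed to StreamingRead with the
+            # same session, transaction, and read arguments.
+            #
+            #     resp = transport.partition_read(partition_req)
+            #     for part in resp.partitions:
+            #         read_req = spanner.ReadRequest(
+            #             session=partition_req.session,
+            #             transaction=partition_req.transaction,
+            #             table=partition_req.table,
+            #             columns=partition_req.columns,
+            #             key_set=partition_req.key_set,
+            #             partition_token=part.partition_token,
+            #         )
+            #         for chunk in transport.streaming_read(read_req):
+            #             ...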
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner.PartitionResponse() + pb_resp = spanner.PartitionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_partition_read(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_partition_read_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner.PartitionResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.partition_read", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "PartitionRead", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Read(_BaseSpannerRestTransport._BaseRead, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.Read") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.ReadRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> result_set.ResultSet: + r"""Call the read method over HTTP. + + Args: + request (~.spanner.ReadRequest): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.ResultSet: + Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
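+
+            Example:
+                A minimal request sketch; ``transport``, ``session_name``,
+                the table, and the columns are illustrative assumptions,
+                and ``keys`` is imported from
+                ``google.cloud.spanner_v1.types``::
+
+                    from google.cloud.spanner_v1.types import keys
+
+                    request = spanner.ReadRequest(
+                        session=session_name,
+                        table="Albums",
+                        columns=["AlbumId", "AlbumTitle"],
+                        key_set=keys.KeySet(all_=True),  # read every row
+                    )
+                    result = transport.read(request)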
+ + """ + + http_options = _BaseSpannerRestTransport._BaseRead._get_http_options() + + request, metadata = self._interceptor.pre_read(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseRead._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseRead._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseRead._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.Read", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Read", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._Read._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = result_set.ResultSet() + pb_resp = result_set.ResultSet.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_read(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_read_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = result_set.ResultSet.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.read", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Read", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Rollback(_BaseSpannerRestTransport._BaseRollback, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.Rollback") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner.RollbackRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the rollback method over HTTP. + + Args: + request (~.spanner.RollbackRequest): + The request object. The request for + [Rollback][google.spanner.v1.Spanner.Rollback]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseSpannerRestTransport._BaseRollback._get_http_options() + + request, metadata = self._interceptor.pre_rollback(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseRollback._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseRollback._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseRollback._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.Rollback", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "Rollback", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._Rollback._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _StreamingRead(_BaseSpannerRestTransport._BaseStreamingRead, SpannerRestStub): + def __hash__(self): + return hash("SpannerRestTransport.StreamingRead") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__(self, + request: spanner.ReadRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> rest_streaming.ResponseIterator: + r"""Call the streaming read method over HTTP. + + Args: + request (~.spanner.ReadRequest): + The request object. The request for [Read][google.spanner.v1.Spanner.Read] + and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.result_set.PartialResultSet: + Partial results from a streaming read + or SQL query. Streaming reads and SQL + queries better tolerate large result + sets, large rows, and large values, but + are a little trickier to consume. + + """ + + http_options = _BaseSpannerRestTransport._BaseStreamingRead._get_http_options() + + request, metadata = self._interceptor.pre_streaming_read(request, metadata) + transcoded_request = _BaseSpannerRestTransport._BaseStreamingRead._get_transcoded_request(http_options, request) + + body = _BaseSpannerRestTransport._BaseStreamingRead._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseSpannerRestTransport._BaseStreamingRead._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner_v1.SpannerClient.StreamingRead", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "StreamingRead", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpannerRestTransport._StreamingRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) + + resp = self._interceptor.post_streaming_read(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_streaming_read_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner_v1.SpannerClient.streaming_read", + extra = { + "serviceName": "google.spanner.v1.Spanner", + "rpcName": "StreamingRead", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def batch_create_sessions(self) -> Callable[ + [spanner.BatchCreateSessionsRequest], + spanner.BatchCreateSessionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateSessions(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_write(self) -> Callable[ + [spanner.BatchWriteRequest], + spanner.BatchWriteResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore + + @property + def begin_transaction(self) -> Callable[ + [spanner.BeginTransactionRequest], + transaction.Transaction]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore + + @property + def commit(self) -> Callable[ + [spanner.CommitRequest], + commit_response.CommitResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Commit(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_session(self) -> Callable[ + [spanner.CreateSessionRequest], + spanner.Session]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_session(self) -> Callable[ + [spanner.DeleteSessionRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def execute_batch_dml(self) -> Callable[ + [spanner.ExecuteBatchDmlRequest], + spanner.ExecuteBatchDmlResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteBatchDml(self._session, self._host, self._interceptor) # type: ignore + + @property + def execute_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.ResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteSql(self._session, self._host, self._interceptor) # type: ignore + + @property + def execute_streaming_sql(self) -> Callable[ + [spanner.ExecuteSqlRequest], + result_set.PartialResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExecuteStreamingSql(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_session(self) -> Callable[ + [spanner.GetSessionRequest], + spanner.Session]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSession(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sessions(self) -> Callable[ + [spanner.ListSessionsRequest], + spanner.ListSessionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSessions(self._session, self._host, self._interceptor) # type: ignore + + @property + def partition_query(self) -> Callable[ + [spanner.PartitionQueryRequest], + spanner.PartitionResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PartitionQuery(self._session, self._host, self._interceptor) # type: ignore + + @property + def partition_read(self) -> Callable[ + [spanner.PartitionReadRequest], + spanner.PartitionResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PartitionRead(self._session, self._host, self._interceptor) # type: ignore + + @property + def read(self) -> Callable[ + [spanner.ReadRequest], + result_set.ResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Read(self._session, self._host, self._interceptor) # type: ignore + + @property + def rollback(self) -> Callable[ + [spanner.RollbackRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Rollback(self._session, self._host, self._interceptor) # type: ignore + + @property + def streaming_read(self) -> Callable[ + [spanner.ReadRequest], + result_set.PartialResultSet]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamingRead(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'SpannerRestTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py new file mode 100644 index 0000000000..79eb05d0cf --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py @@ -0,0 +1,817 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import SpannerTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.protobuf import empty_pb2 # type: ignore + + +class _BaseSpannerRestTransport(SpannerTransport): + """Base REST backend transport for Spanner. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseBatchCreateSessions:
+        def __hash__(self): # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = spanner.BatchCreateSessionsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseSpannerRestTransport._BaseBatchCreateSessions._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseBatchWrite:
+        def __hash__(self): # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.BatchWriteRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseBatchWrite._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBeginTransaction: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.BeginTransactionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseBeginTransaction._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCommit: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.CommitRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + 
use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseCommit._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSession: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.CreateSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseCreateSession._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSession: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.DeleteSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseDeleteSession._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExecuteBatchDml: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeBatchDml', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ExecuteBatchDmlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseExecuteBatchDml._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExecuteSql: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeSql', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ExecuteSqlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseExecuteSql._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseExecuteStreamingSql: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeStreamingSql', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ExecuteSqlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + 
transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseExecuteStreamingSql._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSession: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.GetSessionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseGetSession._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSessions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ListSessionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseListSessions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePartitionQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionQuery', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.PartitionQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BasePartitionQuery._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePartitionRead: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionRead', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.PartitionReadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BasePartitionRead._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRead: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:read', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ReadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseRead._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRollback: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + 
return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.RollbackRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseRollback._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseStreamingRead: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:streamingRead', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner.ReadRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseSpannerRestTransport._BaseStreamingRead._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__=( + '_BaseSpannerRestTransport', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py new file mode 100644 index 0000000000..d4c6938ee1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py @@ -0,0 +1,122 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .change_stream import ( + ChangeStreamRecord, +) +from .commit_response import ( + CommitResponse, +) +from .keys import ( + KeyRange, + KeySet, +) +from .mutation import ( + Mutation, +) +from .query_plan import ( + PlanNode, + QueryPlan, +) +from .result_set import ( + PartialResultSet, + ResultSet, + ResultSetMetadata, + ResultSetStats, +) +from .spanner import ( + BatchCreateSessionsRequest, + BatchCreateSessionsResponse, + BatchWriteRequest, + BatchWriteResponse, + BeginTransactionRequest, + CommitRequest, + CreateSessionRequest, + DeleteSessionRequest, + DirectedReadOptions, + ExecuteBatchDmlRequest, + ExecuteBatchDmlResponse, + ExecuteSqlRequest, + GetSessionRequest, + ListSessionsRequest, + ListSessionsResponse, + Partition, + PartitionOptions, + PartitionQueryRequest, + PartitionReadRequest, + PartitionResponse, + ReadRequest, + RequestOptions, + RollbackRequest, + Session, +) +from .transaction import ( + MultiplexedSessionPrecommitToken, + Transaction, + TransactionOptions, + TransactionSelector, +) +from .type import ( + StructType, + Type, + TypeAnnotationCode, + TypeCode, +) + +__all__ = ( + 'ChangeStreamRecord', + 'CommitResponse', + 'KeyRange', + 'KeySet', + 'Mutation', + 'PlanNode', + 'QueryPlan', + 'PartialResultSet', + 'ResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 'BatchWriteRequest', + 'BatchWriteResponse', + 'BeginTransactionRequest', + 'CommitRequest', + 'CreateSessionRequest', + 'DeleteSessionRequest', + 'DirectedReadOptions', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'ExecuteSqlRequest', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'Partition', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'PartitionResponse', + 'ReadRequest', + 'RequestOptions', + 'RollbackRequest', + 'Session', + 'MultiplexedSessionPrecommitToken', + 'Transaction', + 'TransactionOptions', + 'TransactionSelector', + 'StructType', + 'Type', + 'TypeAnnotationCode', + 'TypeCode', +) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py new file mode 100644 index 0000000000..8db7f85bb5 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py @@ -0,0 +1,683 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
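Editorial aside on the types/__init__.py hunk above (not part of the patch): because every message and enum is re-imported and listed in `__all__`, the package-level and module-level spellings name the same objects:

    from google.cloud.spanner_v1 import types
    from google.cloud.spanner_v1.types import KeySet, Mutation, TypeCode

    assert types.KeySet is KeySet
    assert types.Mutation is Mutation
    assert types.TypeCode.INT64 == TypeCode.INT64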
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import type as gs_type +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'ChangeStreamRecord', + }, +) + + +class ChangeStreamRecord(proto.Message): + r"""Spanner Change Streams enable customers to capture and stream out + changes to their Spanner databases in real-time. A change stream can + be created with option partition_mode='IMMUTABLE_KEY_RANGE' or + partition_mode='MUTABLE_KEY_RANGE'. + + This message is only used in Change Streams created with the option + partition_mode='MUTABLE_KEY_RANGE'. Spanner automatically creates a + special Table-Valued Function (TVF) along with each Change Streams. + The function provides access to the change stream's records. The + function is named READ\_ (where + is the name of the change stream), and it + returns a table with only one column called ChangeRecord. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + data_change_record (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord): + Data change record describing a data change + for a change stream partition. + + This field is a member of `oneof`_ ``record``. + heartbeat_record (google.cloud.spanner_v1.types.ChangeStreamRecord.HeartbeatRecord): + Heartbeat record describing a heartbeat for a + change stream partition. + + This field is a member of `oneof`_ ``record``. + partition_start_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionStartRecord): + Partition start record describing a new + change stream partition. + + This field is a member of `oneof`_ ``record``. + partition_end_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEndRecord): + Partition end record describing a terminated + change stream partition. + + This field is a member of `oneof`_ ``record``. + partition_event_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord): + Partition event record describing key range + changes for a change stream partition. + + This field is a member of `oneof`_ ``record``. + """ + + class DataChangeRecord(proto.Message): + r"""A data change record contains a set of changes to a table + with the same modification type (insert, update, or delete) + committed at the same commit timestamp in one change stream + partition for the same transaction. Multiple data change records + can be returned for the same transaction across multiple change + stream partitions. + + Attributes: + commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Indicates the timestamp in which the change was committed. + DataChangeRecord.commit_timestamps, + PartitionStartRecord.start_timestamps, + PartitionEventRecord.commit_timestamps, and + PartitionEndRecord.end_timestamps can have the same value in + the same partition. + record_sequence (str): + Record sequence numbers are unique and monotonically + increasing (but not necessarily contiguous) for a specific + timestamp across record types in the same partition. 
To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + + The record sequence number ordering across partitions is + only meaningful in the context of a specific transaction. + Record sequence numbers are unique across partitions for a + specific transaction. Sort the DataChangeRecords for the + same + [server_transaction_id][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.server_transaction_id] + by + [record_sequence][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.record_sequence] + to reconstruct the ordering of the changes within the + transaction. + server_transaction_id (str): + Provides a globally unique string that represents the + transaction in which the change was committed. Multiple + transactions can have the same commit timestamp, but each + transaction has a unique server_transaction_id. + is_last_record_in_transaction_in_partition (bool): + Indicates whether this is the last record for + a transaction in the current partition. Clients + can use this field to determine when all + records for a transaction in the current + partition have been received. + table (str): + Name of the table affected by the change. + column_metadata (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ColumnMetadata]): + Provides metadata describing the columns associated with the + [mods][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.mods] + listed below. + mods (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.Mod]): + Describes the changes that were made. + mod_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModType): + Describes the type of change. + value_capture_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ValueCaptureType): + Describes the value capture type that was + specified in the change stream configuration + when this change was captured. + number_of_records_in_transaction (int): + Indicates the number of data change records + that are part of this transaction across all + change stream partitions. This value can be used + to assemble all the records associated with a + particular transaction. + number_of_partitions_in_transaction (int): + Indicates the number of partitions that + return data change records for this transaction. + This value can be helpful in assembling all + records associated with a particular + transaction. + transaction_tag (str): + Indicates the transaction tag associated with + this transaction. + is_system_transaction (bool): + Indicates whether the transaction is a system + transaction. System transactions include those + issued by time-to-live (TTL), column backfill, + etc. + """ + class ModType(proto.Enum): + r"""Mod type describes the type of change Spanner applied to the data. + For example, if the client submits an INSERT_OR_UPDATE request, + Spanner will perform an insert if there is no existing row and + return ModType INSERT. Alternatively, if there is an existing row, + Spanner will perform an update and return ModType UPDATE. + + Values: + MOD_TYPE_UNSPECIFIED (0): + Not specified. + INSERT (10): + Indicates data was inserted. + UPDATE (20): + Indicates existing data was updated. + DELETE (30): + Indicates existing data was deleted. 
+ """ + MOD_TYPE_UNSPECIFIED = 0 + INSERT = 10 + UPDATE = 20 + DELETE = 30 + + class ValueCaptureType(proto.Enum): + r"""Value capture type describes which values are recorded in the + data change record. + + Values: + VALUE_CAPTURE_TYPE_UNSPECIFIED (0): + Not specified. + OLD_AND_NEW_VALUES (10): + Records both old and new values of the + modified watched columns. + NEW_VALUES (20): + Records only new values of the modified + watched columns. + NEW_ROW (30): + Records new values of all watched columns, + including modified and unmodified columns. + NEW_ROW_AND_OLD_VALUES (40): + Records the new values of all watched + columns, including modified and unmodified + columns. Also records the old values of the + modified columns. + """ + VALUE_CAPTURE_TYPE_UNSPECIFIED = 0 + OLD_AND_NEW_VALUES = 10 + NEW_VALUES = 20 + NEW_ROW = 30 + NEW_ROW_AND_OLD_VALUES = 40 + + class ColumnMetadata(proto.Message): + r"""Metadata for a column. + + Attributes: + name (str): + Name of the column. + type_ (google.cloud.spanner_v1.types.Type): + Type of the column. + is_primary_key (bool): + Indicates whether the column is a primary key + column. + ordinal_position (int): + Ordinal position of the column based on the + original table definition in the schema starting + with a value of 1. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: gs_type.Type = proto.Field( + proto.MESSAGE, + number=2, + message=gs_type.Type, + ) + is_primary_key: bool = proto.Field( + proto.BOOL, + number=3, + ) + ordinal_position: int = proto.Field( + proto.INT64, + number=4, + ) + + class ModValue(proto.Message): + r"""Returns the value and associated metadata for a particular field of + the + [Mod][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod]. + + Attributes: + column_metadata_index (int): + Index within the repeated + [column_metadata][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.column_metadata] + field, to obtain the column metadata for the column that was + modified. + value (google.protobuf.struct_pb2.Value): + The value of the column. + """ + + column_metadata_index: int = proto.Field( + proto.INT32, + number=1, + ) + value: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + + class Mod(proto.Message): + r"""A mod describes all data changes in a watched table row. + + Attributes: + keys (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): + Returns the value of the primary key of the + modified row. + old_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): + Returns the old values before the change for the modified + columns. Always empty for + [INSERT][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.INSERT], + or if old values are not being captured specified by + [value_capture_type][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType]. + new_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): + Returns the new values after the change for the modified + columns. Always empty for + [DELETE][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.DELETE]. 
+ """ + + keys: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ChangeStreamRecord.DataChangeRecord.ModValue', + ) + old_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ChangeStreamRecord.DataChangeRecord.ModValue', + ) + new_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ChangeStreamRecord.DataChangeRecord.ModValue', + ) + + commit_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + server_transaction_id: str = proto.Field( + proto.STRING, + number=3, + ) + is_last_record_in_transaction_in_partition: bool = proto.Field( + proto.BOOL, + number=4, + ) + table: str = proto.Field( + proto.STRING, + number=5, + ) + column_metadata: MutableSequence['ChangeStreamRecord.DataChangeRecord.ColumnMetadata'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='ChangeStreamRecord.DataChangeRecord.ColumnMetadata', + ) + mods: MutableSequence['ChangeStreamRecord.DataChangeRecord.Mod'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ChangeStreamRecord.DataChangeRecord.Mod', + ) + mod_type: 'ChangeStreamRecord.DataChangeRecord.ModType' = proto.Field( + proto.ENUM, + number=8, + enum='ChangeStreamRecord.DataChangeRecord.ModType', + ) + value_capture_type: 'ChangeStreamRecord.DataChangeRecord.ValueCaptureType' = proto.Field( + proto.ENUM, + number=9, + enum='ChangeStreamRecord.DataChangeRecord.ValueCaptureType', + ) + number_of_records_in_transaction: int = proto.Field( + proto.INT32, + number=10, + ) + number_of_partitions_in_transaction: int = proto.Field( + proto.INT32, + number=11, + ) + transaction_tag: str = proto.Field( + proto.STRING, + number=12, + ) + is_system_transaction: bool = proto.Field( + proto.BOOL, + number=13, + ) + + class HeartbeatRecord(proto.Message): + r"""A heartbeat record is returned as a progress indicator, when + there are no data changes or any other partition record types in + the change stream partition. + + Attributes: + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Indicates the timestamp at which the query + has returned all the records in the change + stream partition with timestamp <= heartbeat + timestamp. The heartbeat timestamp will not be + the same as the timestamps of other record types + in the same partition. + """ + + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + + class PartitionStartRecord(proto.Message): + r"""A partition start record serves as a notification that the + client should schedule the partitions to be queried. + PartitionStartRecord returns information about one or more + partitions. + + Attributes: + start_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Start timestamp at which the partitions should be queried to + return change stream records with timestamps >= + start_timestamp. DataChangeRecord.commit_timestamps, + PartitionStartRecord.start_timestamps, + PartitionEventRecord.commit_timestamps, and + PartitionEndRecord.end_timestamps can have the same value in + the same partition. 
+ record_sequence (str): + Record sequence numbers are unique and monotonically + increasing (but not necessarily contiguous) for a specific + timestamp across record types in the same partition. To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + partition_tokens (MutableSequence[str]): + Unique partition identifiers to be used in + queries. + """ + + start_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + partition_tokens: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + class PartitionEndRecord(proto.Message): + r"""A partition end record serves as a notification that the + client should stop reading the partition. No further records are + expected to be retrieved on it. + + Attributes: + end_timestamp (google.protobuf.timestamp_pb2.Timestamp): + End timestamp at which the change stream partition is + terminated. All changes generated by this partition will + have timestamps <= end_timestamp. + DataChangeRecord.commit_timestamps, + PartitionStartRecord.start_timestamps, + PartitionEventRecord.commit_timestamps, and + PartitionEndRecord.end_timestamps can have the same value in + the same partition. PartitionEndRecord is the last record + returned for a partition. + record_sequence (str): + Record sequence numbers are unique and monotonically + increasing (but not necessarily contiguous) for a specific + timestamp across record types in the same partition. To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + partition_token (str): + Unique partition identifier describing the terminated change + stream partition. + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.partition_token] + is equal to the partition token of the change stream + partition currently queried to return this + PartitionEndRecord. + """ + + end_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + partition_token: str = proto.Field( + proto.STRING, + number=3, + ) + + class PartitionEventRecord(proto.Message): + r"""A partition event record describes key range changes for a change + stream partition. The changes to a row defined by its primary key + can be captured in one change stream partition for a specific time + range, and then be captured in a different change stream partition + for a different time range. This movement of key ranges across + change stream partitions is a reflection of activities, such as + Spanner's dynamic splitting and load balancing, etc. Processing this + event is needed if users want to guarantee processing of the changes + for any key in timestamp order. If time ordered processing of + changes for a primary key is not needed, this event can be ignored. + To guarantee time ordered processing for each primary key, if the + event describes move-ins, the reader of this partition needs to wait + until the readers of the source partitions have processed all + records with timestamps <= this + PartitionEventRecord.commit_timestamp, before advancing beyond this + PartitionEventRecord. 
If the event describes move-outs, the reader + can notify the readers of the destination partitions that they can + continue processing. + + Attributes: + commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Indicates the commit timestamp at which the key range change + occurred. DataChangeRecord.commit_timestamps, + PartitionStartRecord.start_timestamps, + PartitionEventRecord.commit_timestamps, and + PartitionEndRecord.end_timestamps can have the same value in + the same partition. + record_sequence (str): + Record sequence numbers are unique and monotonically + increasing (but not necessarily contiguous) for a specific + timestamp across record types in the same partition. To + guarantee ordered processing, the reader should process + records (of potentially different types) in record_sequence + order for a specific timestamp in the same partition. + partition_token (str): + Unique partition identifier describing the partition this + event occurred on. + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] + is equal to the partition token of the change stream + partition currently queried to return this + PartitionEventRecord. + move_in_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveInEvent]): + Set when one or more key ranges are moved into the change + stream partition identified by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + + Example: Two key ranges are moved into partition (P1) from + partition (P2) and partition (P3) in a single transaction at + timestamp T. + + The PartitionEventRecord returned in P1 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P1" move_in_events { source_partition_token: "P2" } + move_in_events { source_partition_token: "P3" } } + + The PartitionEventRecord returned in P2 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P2" move_out_events { destination_partition_token: "P1" } } + + The PartitionEventRecord returned in P3 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P3" move_out_events { destination_partition_token: "P1" } } + move_out_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent]): + Set when one or more key ranges are moved out of the change + stream partition identified by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + + Example: Two key ranges are moved out of partition (P1) to + partition (P2) and partition (P3) in a single transaction at + timestamp T. 
+ + The PartitionEventRecord returned in P1 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P1" move_out_events { destination_partition_token: "P2" } + move_out_events { destination_partition_token: "P3" } } + + The PartitionEventRecord returned in P2 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P2" move_in_events { source_partition_token: "P1" } } + + The PartitionEventRecord returned in P3 will reflect the + move as: + + PartitionEventRecord { commit_timestamp: T partition_token: + "P3" move_in_events { source_partition_token: "P1" } } + """ + + class MoveInEvent(proto.Message): + r"""Describes move-in of the key ranges into the change stream partition + identified by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + + To maintain processing the changes for a particular key in timestamp + order, the query processing the change stream partition identified + by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] + should not advance beyond the partition event record commit + timestamp until the queries processing the source change stream + partitions have processed all change stream records with timestamps + <= the partition event record commit timestamp. + + Attributes: + source_partition_token (str): + An unique partition identifier describing the + source change stream partition that recorded + changes for the key range that is moving into + this partition. + """ + + source_partition_token: str = proto.Field( + proto.STRING, + number=1, + ) + + class MoveOutEvent(proto.Message): + r"""Describes move-out of the key ranges out of the change stream + partition identified by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. + + To maintain processing the changes for a particular key in timestamp + order, the query processing the + [MoveOutEvent][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent] + in the partition identified by + [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] + should inform the queries processing the destination partitions that + they can unblock and proceed processing records past the + [commit_timestamp][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.commit_timestamp]. + + Attributes: + destination_partition_token (str): + An unique partition identifier describing the + destination change stream partition that will + record changes for the key range that is moving + out of this partition. 
+ """ + + destination_partition_token: str = proto.Field( + proto.STRING, + number=1, + ) + + commit_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + record_sequence: str = proto.Field( + proto.STRING, + number=2, + ) + partition_token: str = proto.Field( + proto.STRING, + number=3, + ) + move_in_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveInEvent'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ChangeStreamRecord.PartitionEventRecord.MoveInEvent', + ) + move_out_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveOutEvent'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='ChangeStreamRecord.PartitionEventRecord.MoveOutEvent', + ) + + data_change_record: DataChangeRecord = proto.Field( + proto.MESSAGE, + number=1, + oneof='record', + message=DataChangeRecord, + ) + heartbeat_record: HeartbeatRecord = proto.Field( + proto.MESSAGE, + number=2, + oneof='record', + message=HeartbeatRecord, + ) + partition_start_record: PartitionStartRecord = proto.Field( + proto.MESSAGE, + number=3, + oneof='record', + message=PartitionStartRecord, + ) + partition_end_record: PartitionEndRecord = proto.Field( + proto.MESSAGE, + number=4, + oneof='record', + message=PartitionEndRecord, + ) + partition_event_record: PartitionEventRecord = proto.Field( + proto.MESSAGE, + number=5, + oneof='record', + message=PartitionEventRecord, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py new file mode 100644 index 0000000000..3771ef1f71 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import transaction +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'CommitResponse', + }, +) + + +class CommitResponse(proto.Message): + r"""The response for [Commit][google.spanner.v1.Spanner.Commit]. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): + The Cloud Spanner timestamp at which the + transaction committed. + commit_stats (google.cloud.spanner_v1.types.CommitResponse.CommitStats): + The statistics about this ``Commit``. Not returned by + default. For more information, see + [CommitRequest.return_commit_stats][google.spanner.v1.CommitRequest.return_commit_stats]. 
+ precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + If specified, transaction has not committed + yet. You must retry the commit with the new + precommit token. + + This field is a member of `oneof`_ ``MultiplexedSessionRetry``. + snapshot_timestamp (google.protobuf.timestamp_pb2.Timestamp): + If ``TransactionOptions.isolation_level`` is set to + ``IsolationLevel.REPEATABLE_READ``, then the snapshot + timestamp is the timestamp at which all reads in the + transaction ran. This timestamp is never returned. + """ + + class CommitStats(proto.Message): + r"""Additional statistics about a commit. + + Attributes: + mutation_count (int): + The total number of mutations for the transaction. Knowing + the ``mutation_count`` value can help you maximize the + number of mutations in a transaction and minimize the number + of API round trips. You can also monitor this value to + prevent transactions from exceeding the system + `limit `__. + If the number of mutations exceeds the limit, the server + returns + `INVALID_ARGUMENT `__. + """ + + mutation_count: int = proto.Field( + proto.INT64, + number=1, + ) + + commit_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + commit_stats: CommitStats = proto.Field( + proto.MESSAGE, + number=2, + message=CommitStats, + ) + precommit_token: transaction.MultiplexedSessionPrecommitToken = proto.Field( + proto.MESSAGE, + number=4, + oneof='MultiplexedSessionRetry', + message=transaction.MultiplexedSessionPrecommitToken, + ) + snapshot_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py new file mode 100644 index 0000000000..54f745c9b7 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'KeyRange', + 'KeySet', + }, +) + + +class KeyRange(proto.Message): + r"""KeyRange represents a range of rows in a table or index. + + A range has a start key and an end key. These keys can be open or + closed, indicating if the range includes rows with that key. + + Keys are represented by lists, where the ith value in the list + corresponds to the ith component of the table or index primary key. + Individual values are encoded as described + [here][google.spanner.v1.TypeCode]. 
+ + For example, consider the following table definition: + + :: + + CREATE TABLE UserEvents ( + UserName STRING(MAX), + EventDate STRING(10) + ) PRIMARY KEY(UserName, EventDate); + + The following keys name rows in this table: + + :: + + ["Bob", "2014-09-23"] + ["Alfred", "2015-06-12"] + + Since the ``UserEvents`` table's ``PRIMARY KEY`` clause names two + columns, each ``UserEvents`` key has two elements; the first is the + ``UserName``, and the second is the ``EventDate``. + + Key ranges with multiple components are interpreted + lexicographically by component using the table or index key's + declared sort order. For example, the following range returns all + events for user ``"Bob"`` that occurred in the year 2015: + + :: + + "start_closed": ["Bob", "2015-01-01"] + "end_closed": ["Bob", "2015-12-31"] + + Start and end keys can omit trailing key components. This affects + the inclusion and exclusion of rows that exactly match the provided + key components: if the key is closed, then rows that exactly match + the provided components are included; if the key is open, then rows + that exactly match are not included. + + For example, the following range includes all events for ``"Bob"`` + that occurred during and after the year 2000: + + :: + + "start_closed": ["Bob", "2000-01-01"] + "end_closed": ["Bob"] + + The next example retrieves all events for ``"Bob"``: + + :: + + "start_closed": ["Bob"] + "end_closed": ["Bob"] + + To retrieve events before the year 2000: + + :: + + "start_closed": ["Bob"] + "end_open": ["Bob", "2000-01-01"] + + The following range includes all rows in the table: + + :: + + "start_closed": [] + "end_closed": [] + + This range returns all users whose ``UserName`` begins with any + character from A to C: + + :: + + "start_closed": ["A"] + "end_open": ["D"] + + This range returns all users whose ``UserName`` begins with B: + + :: + + "start_closed": ["B"] + "end_open": ["C"] + + Key ranges honor column sort order. For example, suppose a table is + defined as follows: + + :: + + CREATE TABLE DescendingSortedTable { + Key INT64, + ... + ) PRIMARY KEY(Key DESC); + + The following range retrieves all rows with key values between 1 and + 100 inclusive: + + :: + + "start_closed": ["100"] + "end_closed": ["1"] + + Note that 100 is passed as the start, and 1 is passed as the end, + because ``Key`` is a descending column in the schema. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_closed (google.protobuf.struct_pb2.ListValue): + If the start is closed, then the range includes all rows + whose first ``len(start_closed)`` key columns exactly match + ``start_closed``. + + This field is a member of `oneof`_ ``start_key_type``. + start_open (google.protobuf.struct_pb2.ListValue): + If the start is open, then the range excludes rows whose + first ``len(start_open)`` key columns exactly match + ``start_open``. + + This field is a member of `oneof`_ ``start_key_type``. + end_closed (google.protobuf.struct_pb2.ListValue): + If the end is closed, then the range includes all rows whose + first ``len(end_closed)`` key columns exactly match + ``end_closed``. + + This field is a member of `oneof`_ ``end_key_type``. 
+ end_open (google.protobuf.struct_pb2.ListValue): + If the end is open, then the range excludes rows whose first + ``len(end_open)`` key columns exactly match ``end_open``. + + This field is a member of `oneof`_ ``end_key_type``. + """ + + start_closed: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=1, + oneof='start_key_type', + message=struct_pb2.ListValue, + ) + start_open: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=2, + oneof='start_key_type', + message=struct_pb2.ListValue, + ) + end_closed: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=3, + oneof='end_key_type', + message=struct_pb2.ListValue, + ) + end_open: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=4, + oneof='end_key_type', + message=struct_pb2.ListValue, + ) + + +class KeySet(proto.Message): + r"""``KeySet`` defines a collection of Cloud Spanner keys and/or key + ranges. All the keys are expected to be in the same table or index. + The keys need not be sorted in any particular way. + + If the same key is specified multiple times in the set (for example + if two ranges, two keys, or a key and a range overlap), Cloud + Spanner behaves as if the key were only specified once. + + Attributes: + keys (MutableSequence[google.protobuf.struct_pb2.ListValue]): + A list of specific keys. Entries in ``keys`` should have + exactly as many elements as there are columns in the primary + or index key with which this ``KeySet`` is used. Individual + key values are encoded as described + [here][google.spanner.v1.TypeCode]. + ranges (MutableSequence[google.cloud.spanner_v1.types.KeyRange]): + A list of key ranges. See + [KeyRange][google.spanner.v1.KeyRange] for more information + about key range specifications. + all_ (bool): + For convenience ``all`` can be set to ``true`` to indicate + that this ``KeySet`` matches all keys in the table or index. + Note that any keys specified in ``keys`` or ``ranges`` are + only yielded once. + """ + + keys: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.ListValue, + ) + ranges: MutableSequence['KeyRange'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='KeyRange', + ) + all_: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py new file mode 100644 index 0000000000..0d4f7b4466 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
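Editorial aside on the keys.py hunk above (not part of the patch): a construction sketch reusing the `UserEvents` examples from the `KeyRange` docstring. proto-plus marshals the plain Python lists into `google.protobuf.ListValue`:

    from google.cloud.spanner_v1.types import KeyRange, KeySet

    # All events for "Bob" during 2015, both endpoints included.
    bob_2015 = KeyRange(
        start_closed=["Bob", "2015-01-01"],
        end_closed=["Bob", "2015-12-31"],
    )

    key_set = KeySet(
        keys=[["Alfred", "2015-06-12"]],  # one exact (UserName, EventDate) key
        ranges=[bob_2015],
    )

    all_rows = KeySet(all_=True)  # matches every key in the table or index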
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import keys +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'Mutation', + }, +) + + +class Mutation(proto.Message): + r"""A modification to one or more Cloud Spanner rows. Mutations can be + applied to a Cloud Spanner database by sending them in a + [Commit][google.spanner.v1.Spanner.Commit] call. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + insert (google.cloud.spanner_v1.types.Mutation.Write): + Insert new rows in a table. If any of the rows already + exist, the write or transaction fails with error + ``ALREADY_EXISTS``. + + This field is a member of `oneof`_ ``operation``. + update (google.cloud.spanner_v1.types.Mutation.Write): + Update existing rows in a table. If any of the rows does not + already exist, the transaction fails with error + ``NOT_FOUND``. + + This field is a member of `oneof`_ ``operation``. + insert_or_update (google.cloud.spanner_v1.types.Mutation.Write): + Like [insert][google.spanner.v1.Mutation.insert], except + that if the row already exists, then its column values are + overwritten with the ones provided. Any column values not + explicitly written are preserved. + + When using + [insert_or_update][google.spanner.v1.Mutation.insert_or_update], + just as when using + [insert][google.spanner.v1.Mutation.insert], all + ``NOT NULL`` columns in the table must be given a value. + This holds true even when the row already exists and will + therefore actually be updated. + + This field is a member of `oneof`_ ``operation``. + replace (google.cloud.spanner_v1.types.Mutation.Write): + Like [insert][google.spanner.v1.Mutation.insert], except + that if the row already exists, it is deleted, and the + column values provided are inserted instead. Unlike + [insert_or_update][google.spanner.v1.Mutation.insert_or_update], + this means any values not explicitly written become + ``NULL``. + + In an interleaved table, if you create the child table with + the ``ON DELETE CASCADE`` annotation, then replacing a + parent row also deletes the child rows. Otherwise, you must + delete the child rows before you replace the parent row. + + This field is a member of `oneof`_ ``operation``. + delete (google.cloud.spanner_v1.types.Mutation.Delete): + Delete rows from a table. Succeeds whether or + not the named rows were present. + + This field is a member of `oneof`_ ``operation``. + """ + + class Write(proto.Message): + r"""Arguments to [insert][google.spanner.v1.Mutation.insert], + [update][google.spanner.v1.Mutation.update], + [insert_or_update][google.spanner.v1.Mutation.insert_or_update], and + [replace][google.spanner.v1.Mutation.replace] operations. + + Attributes: + table (str): + Required. The table whose rows will be + written. + columns (MutableSequence[str]): + The names of the columns in + [table][google.spanner.v1.Mutation.Write.table] to be + written. + + The list of columns must contain enough columns to allow + Cloud Spanner to derive values for all primary key columns + in the row(s) to be modified. 
+ values (MutableSequence[google.protobuf.struct_pb2.ListValue]): + The values to be written. ``values`` can contain more than + one list of values. If it does, then multiple rows are + written, one for each entry in ``values``. Each list in + ``values`` must have exactly as many entries as there are + entries in + [columns][google.spanner.v1.Mutation.Write.columns] above. + Sending multiple lists is equivalent to sending multiple + ``Mutation``\ s, each containing one ``values`` entry and + repeating [table][google.spanner.v1.Mutation.Write.table] + and [columns][google.spanner.v1.Mutation.Write.columns]. + Individual values in each list are encoded as described + [here][google.spanner.v1.TypeCode]. + """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + values: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=struct_pb2.ListValue, + ) + + class Delete(proto.Message): + r"""Arguments to [delete][google.spanner.v1.Mutation.delete] operations. + + Attributes: + table (str): + Required. The table whose rows will be + deleted. + key_set (google.cloud.spanner_v1.types.KeySet): + Required. The primary keys of the rows within + [table][google.spanner.v1.Mutation.Delete.table] to delete. + The primary keys must be specified in the order in which + they appear in the ``PRIMARY KEY()`` clause of the table's + equivalent DDL statement (the DDL statement used to create + the table). Delete is idempotent. The transaction will + succeed even if some or all rows do not exist. + """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + key_set: keys.KeySet = proto.Field( + proto.MESSAGE, + number=2, + message=keys.KeySet, + ) + + insert: Write = proto.Field( + proto.MESSAGE, + number=1, + oneof='operation', + message=Write, + ) + update: Write = proto.Field( + proto.MESSAGE, + number=2, + oneof='operation', + message=Write, + ) + insert_or_update: Write = proto.Field( + proto.MESSAGE, + number=3, + oneof='operation', + message=Write, + ) + replace: Write = proto.Field( + proto.MESSAGE, + number=4, + oneof='operation', + message=Write, + ) + delete: Delete = proto.Field( + proto.MESSAGE, + number=5, + oneof='operation', + message=Delete, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py new file mode 100644 index 0000000000..26e730d2d0 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
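Editorial aside on the mutation.py hunk above (not part of the patch): one write and one delete expressed as `Mutation` messages, with placeholder table and column names. Each inner list in `values` is one row, and INT64 key values travel as decimal strings per the TypeCode encoding rules:

    from google.cloud.spanner_v1.types import KeySet, Mutation

    upsert = Mutation(
        insert_or_update=Mutation.Write(
            table="Singers",
            columns=["SingerId", "FirstName"],
            values=[["1", "Marc"], ["2", "Catalina"]],  # one list per row
        )
    )

    purge = Mutation(
        delete=Mutation.Delete(
            table="Singers",
            key_set=KeySet(keys=[["1"], ["2"]]),  # keys in PRIMARY KEY() order
        )
    )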
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'PlanNode', + 'QueryPlan', + }, +) + + +class PlanNode(proto.Message): + r"""Node information for nodes appearing in a + [QueryPlan.plan_nodes][google.spanner.v1.QueryPlan.plan_nodes]. + + Attributes: + index (int): + The ``PlanNode``'s index in [node + list][google.spanner.v1.QueryPlan.plan_nodes]. + kind (google.cloud.spanner_v1.types.PlanNode.Kind): + Used to determine the type of node. May be needed for + visualizing different kinds of nodes differently. For + example, If the node is a + [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] node, it + will have a condensed representation which can be used to + directly embed a description of the node in its parent. + display_name (str): + The display name for the node. + child_links (MutableSequence[google.cloud.spanner_v1.types.PlanNode.ChildLink]): + List of child node ``index``\ es and their relationship to + this parent. + short_representation (google.cloud.spanner_v1.types.PlanNode.ShortRepresentation): + Condensed representation for + [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] nodes. + metadata (google.protobuf.struct_pb2.Struct): + Attributes relevant to the node contained in a group of + key-value pairs. For example, a Parameter Reference node + could have the following information in its metadata: + + :: + + { + "parameter_reference": "param1", + "parameter_type": "array" + } + execution_stats (google.protobuf.struct_pb2.Struct): + The execution statistics associated with the + node, contained in a group of key-value pairs. + Only present if the plan was returned as a + result of a profile query. For example, number + of executions, number of rows/time per execution + etc. + """ + class Kind(proto.Enum): + r"""The kind of [PlanNode][google.spanner.v1.PlanNode]. Distinguishes + between the two different kinds of nodes that can appear in a query + plan. + + Values: + KIND_UNSPECIFIED (0): + Not specified. + RELATIONAL (1): + Denotes a Relational operator node in the expression tree. + Relational operators represent iterative processing of rows + during query execution. For example, a ``TableScan`` + operation that reads rows from a table. + SCALAR (2): + Denotes a Scalar node in the expression tree. + Scalar nodes represent non-iterable entities in + the query plan. For example, constants or + arithmetic operators appearing inside predicate + expressions or references to column names. + """ + KIND_UNSPECIFIED = 0 + RELATIONAL = 1 + SCALAR = 2 + + class ChildLink(proto.Message): + r"""Metadata associated with a parent-child relationship appearing in a + [PlanNode][google.spanner.v1.PlanNode]. + + Attributes: + child_index (int): + The node to which the link points. + type_ (str): + The type of the link. For example, in Hash + Joins this could be used to distinguish between + the build child and the probe child, or in the + case of the child being an output variable, to + represent the tag associated with the output + variable. + variable (str): + Only present if the child node is + [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] and + corresponds to an output variable of the parent node. The + field carries the name of the output variable. 
For example, + a ``TableScan`` operator that reads rows from a table will + have child links to the ``SCALAR`` nodes representing the + output variables created for each column that is read by the + operator. The corresponding ``variable`` fields will be set + to the variable names assigned to the columns. + """ + + child_index: int = proto.Field( + proto.INT32, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + variable: str = proto.Field( + proto.STRING, + number=3, + ) + + class ShortRepresentation(proto.Message): + r"""Condensed representation of a node and its subtree. Only present for + ``SCALAR`` [PlanNode(s)][google.spanner.v1.PlanNode]. + + Attributes: + description (str): + A string representation of the expression + subtree rooted at this node. + subqueries (MutableMapping[str, int]): + A mapping of (subquery variable name) -> (subquery node id) + for cases where the ``description`` string of this node + references a ``SCALAR`` subquery contained in the expression + subtree rooted at this node. The referenced ``SCALAR`` + subquery may not necessarily be a direct child of this node. + """ + + description: str = proto.Field( + proto.STRING, + number=1, + ) + subqueries: MutableMapping[str, int] = proto.MapField( + proto.STRING, + proto.INT32, + number=2, + ) + + index: int = proto.Field( + proto.INT32, + number=1, + ) + kind: Kind = proto.Field( + proto.ENUM, + number=2, + enum=Kind, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + child_links: MutableSequence[ChildLink] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=ChildLink, + ) + short_representation: ShortRepresentation = proto.Field( + proto.MESSAGE, + number=5, + message=ShortRepresentation, + ) + metadata: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) + execution_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=7, + message=struct_pb2.Struct, + ) + + +class QueryPlan(proto.Message): + r"""Contains an ordered list of nodes appearing in the query + plan. + + Attributes: + plan_nodes (MutableSequence[google.cloud.spanner_v1.types.PlanNode]): + The nodes in the query plan. Plan nodes are returned in + pre-order starting with the plan root. Each + [PlanNode][google.spanner.v1.PlanNode]'s ``id`` corresponds + to its index in ``plan_nodes``. + """ + + plan_nodes: MutableSequence['PlanNode'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PlanNode', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py new file mode 100644 index 0000000000..9503d0c172 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
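Editorial aside on the query_plan.py hunk above (not part of the patch): the documented invariant (each `PlanNode`'s `id` equals its position in `plan_nodes`, and nodes are returned pre-order from the root) is enough to render a plan as a tree:

    from google.cloud.spanner_v1.types import QueryPlan

    def print_plan(plan: QueryPlan) -> None:
        """Print each node indented under its parent, root first."""
        def visit(index: int, depth: int) -> None:
            node = plan.plan_nodes[index]
            print("  " * depth + (node.display_name or node.kind.name))
            for link in node.child_links:
                visit(link.child_index, depth + 1)
        if plan.plan_nodes:
            visit(0, 0)  # pre-order: the root is the first node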
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import query_plan as gs_query_plan +from google.cloud.spanner_v1.types import transaction as gs_transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.protobuf import struct_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'ResultSet', + 'PartialResultSet', + 'ResultSetMetadata', + 'ResultSetStats', + }, +) + + +class ResultSet(proto.Message): + r"""Results from [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + + Attributes: + metadata (google.cloud.spanner_v1.types.ResultSetMetadata): + Metadata about the result set, such as row + type information. + rows (MutableSequence[google.protobuf.struct_pb2.ListValue]): + Each element in ``rows`` is a row whose format is defined by + [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. + The ith element in each row matches the ith field in + [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. + Elements are encoded based on type as described + [here][google.spanner.v1.TypeCode]. + stats (google.cloud.spanner_v1.types.ResultSetStats): + Query plan and execution statistics for the SQL statement + that produced this result set. These can be requested by + setting + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + DML statements always produce stats containing the number of + rows modified, unless executed using the + [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + Other fields might or might not be populated, based on the + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + Optional. A precommit token is included if the read-write + transaction is on a multiplexed session. Pass the precommit + token with the highest sequence number from this transaction + attempt to the [Commit][google.spanner.v1.Spanner.Commit] + request for this transaction. + """ + + metadata: 'ResultSetMetadata' = proto.Field( + proto.MESSAGE, + number=1, + message='ResultSetMetadata', + ) + rows: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.ListValue, + ) + stats: 'ResultSetStats' = proto.Field( + proto.MESSAGE, + number=3, + message='ResultSetStats', + ) + precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( + proto.MESSAGE, + number=5, + message=gs_transaction.MultiplexedSessionPrecommitToken, + ) + + +class PartialResultSet(proto.Message): + r"""Partial results from a streaming read or SQL query. Streaming + reads and SQL queries better tolerate large result sets, large + rows, and large values, but are a little trickier to consume. + + Attributes: + metadata (google.cloud.spanner_v1.types.ResultSetMetadata): + Metadata about the result set, such as row + type information. Only present in the first + response. + values (MutableSequence[google.protobuf.struct_pb2.Value]): + A streamed result set consists of a stream of values, which + might be split into many ``PartialResultSet`` messages to + accommodate large rows and/or large values. 
Every N complete + values defines a row, where N is equal to the number of + entries in + [metadata.row_type.fields][google.spanner.v1.StructType.fields]. + + Most values are encoded based on type as described + [here][google.spanner.v1.TypeCode]. + + It's possible that the last value in values is "chunked", + meaning that the rest of the value is sent in subsequent + ``PartialResultSet``\ (s). This is denoted by the + [chunked_value][google.spanner.v1.PartialResultSet.chunked_value] + field. Two or more chunked values can be merged to form a + complete value as follows: + + - ``bool/number/null``: can't be chunked + - ``string``: concatenate the strings + - ``list``: concatenate the lists. If the last element in a + list is a ``string``, ``list``, or ``object``, merge it + with the first element in the next list by applying these + rules recursively. + - ``object``: concatenate the (field name, field value) + pairs. If a field name is duplicated, then apply these + rules recursively to merge the field values. + + Some examples of merging: + + :: + + Strings are concatenated. + "foo", "bar" => "foobar" + + Lists of non-strings are concatenated. + [2, 3], [4] => [2, 3, 4] + + Lists are concatenated, but the last and first elements are merged + because they are strings. + ["a", "b"], ["c", "d"] => ["a", "bc", "d"] + + Lists are concatenated, but the last and first elements are merged + because they are lists. Recursively, the last and first elements + of the inner lists are merged because they are strings. + ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"] + + Non-overlapping object fields are combined. + {"a": "1"}, {"b": "2"} => {"a": "1", "b": 2"} + + Overlapping object fields are merged. + {"a": "1"}, {"a": "2"} => {"a": "12"} + + Examples of merging objects containing lists of strings. + {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]} + + For a more complete example, suppose a streaming SQL query + is yielding a result set whose rows contain a single string + field. The following ``PartialResultSet``\ s might be + yielded: + + :: + + { + "metadata": { ... } + "values": ["Hello", "W"] + "chunked_value": true + "resume_token": "Af65..." + } + { + "values": ["orl"] + "chunked_value": true + } + { + "values": ["d"] + "resume_token": "Zx1B..." + } + + This sequence of ``PartialResultSet``\ s encodes two rows, + one containing the field value ``"Hello"``, and a second + containing the field value ``"World" = "W" + "orl" + "d"``. + + Not all ``PartialResultSet``\ s contain a ``resume_token``. + Execution can only be resumed from a previously yielded + ``resume_token``. For the above sequence of + ``PartialResultSet``\ s, resuming the query with + ``"resume_token": "Af65..."`` yields results from the + ``PartialResultSet`` with value "orl". + chunked_value (bool): + If true, then the final value in + [values][google.spanner.v1.PartialResultSet.values] is + chunked, and must be combined with more values from + subsequent ``PartialResultSet``\ s to obtain a complete + field value. + resume_token (bytes): + Streaming calls might be interrupted for a variety of + reasons, such as TCP connection loss. If this occurs, the + stream of results can be resumed by re-sending the original + request and including ``resume_token``. Note that executing + any other transaction in the same session invalidates the + token. + stats (google.cloud.spanner_v1.types.ResultSetStats): + Query plan and execution statistics for the statement that + produced this streaming result set. 
These can be requested + by setting + [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] + and are sent only once with the last response in the stream. + This field is also present in the last response for DML + statements. + precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + Optional. A precommit token is included if the read-write + transaction has multiplexed sessions enabled. Pass the + precommit token with the highest sequence number from this + transaction attempt to the + [Commit][google.spanner.v1.Spanner.Commit] request for this + transaction. + last (bool): + Optional. Indicates whether this is the last + ``PartialResultSet`` in the stream. The server might + optionally set this field. Clients shouldn't rely on this + field being set in all cases. + """ + + metadata: 'ResultSetMetadata' = proto.Field( + proto.MESSAGE, + number=1, + message='ResultSetMetadata', + ) + values: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + chunked_value: bool = proto.Field( + proto.BOOL, + number=3, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=4, + ) + stats: 'ResultSetStats' = proto.Field( + proto.MESSAGE, + number=5, + message='ResultSetStats', + ) + precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( + proto.MESSAGE, + number=8, + message=gs_transaction.MultiplexedSessionPrecommitToken, + ) + last: bool = proto.Field( + proto.BOOL, + number=9, + ) + + +class ResultSetMetadata(proto.Message): + r"""Metadata about a [ResultSet][google.spanner.v1.ResultSet] or + [PartialResultSet][google.spanner.v1.PartialResultSet]. + + Attributes: + row_type (google.cloud.spanner_v1.types.StructType): + Indicates the field names and types for the rows in the + result set. For example, a SQL query like + ``"SELECT UserId, UserName FROM Users"`` could return a + ``row_type`` value like: + + :: + + "fields": [ + { "name": "UserId", "type": { "code": "INT64" } }, + { "name": "UserName", "type": { "code": "STRING" } }, + ] + transaction (google.cloud.spanner_v1.types.Transaction): + If the read or SQL query began a transaction + as a side-effect, the information about the new + transaction is yielded here. + undeclared_parameters (google.cloud.spanner_v1.types.StructType): + A SQL query can be parameterized. In PLAN mode, these + parameters can be undeclared. This indicates the field names + and types for those undeclared parameters in the SQL query. + For example, a SQL query like + ``"SELECT * FROM Users where UserId = @userId and UserName = @userName "`` + could return a ``undeclared_parameters`` value like: + + :: + + "fields": [ + { "name": "UserId", "type": { "code": "INT64" } }, + { "name": "UserName", "type": { "code": "STRING" } }, + ] + """ + + row_type: gs_type.StructType = proto.Field( + proto.MESSAGE, + number=1, + message=gs_type.StructType, + ) + transaction: gs_transaction.Transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.Transaction, + ) + undeclared_parameters: gs_type.StructType = proto.Field( + proto.MESSAGE, + number=3, + message=gs_type.StructType, + ) + + +class ResultSetStats(proto.Message): + r"""Additional statistics about a + [ResultSet][google.spanner.v1.ResultSet] or + [PartialResultSet][google.spanner.v1.PartialResultSet]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + query_plan (google.cloud.spanner_v1.types.QueryPlan): + [QueryPlan][google.spanner.v1.QueryPlan] for the query + associated with this result. + query_stats (google.protobuf.struct_pb2.Struct): + Aggregated statistics from the execution of the query. Only + present when the query is profiled. For example, a query + could return the statistics as follows: + + :: + + { + "rows_returned": "3", + "elapsed_time": "1.22 secs", + "cpu_time": "1.19 secs" + } + row_count_exact (int): + Standard DML returns an exact count of rows + that were modified. + + This field is a member of `oneof`_ ``row_count``. + row_count_lower_bound (int): + Partitioned DML doesn't offer exactly-once + semantics, so it returns a lower bound of the + rows modified. + + This field is a member of `oneof`_ ``row_count``. + """ + + query_plan: gs_query_plan.QueryPlan = proto.Field( + proto.MESSAGE, + number=1, + message=gs_query_plan.QueryPlan, + ) + query_stats: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + row_count_exact: int = proto.Field( + proto.INT64, + number=3, + oneof='row_count', + ) + row_count_lower_bound: int = proto.Field( + proto.INT64, + number=4, + oneof='row_count', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py new file mode 100644 index 0000000000..2742de8269 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py @@ -0,0 +1,1782 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_v1.types import keys +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import transaction as gs_transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'CreateSessionRequest', + 'BatchCreateSessionsRequest', + 'BatchCreateSessionsResponse', + 'Session', + 'GetSessionRequest', + 'ListSessionsRequest', + 'ListSessionsResponse', + 'DeleteSessionRequest', + 'RequestOptions', + 'DirectedReadOptions', + 'ExecuteSqlRequest', + 'ExecuteBatchDmlRequest', + 'ExecuteBatchDmlResponse', + 'PartitionOptions', + 'PartitionQueryRequest', + 'PartitionReadRequest', + 'Partition', + 'PartitionResponse', + 'ReadRequest', + 'BeginTransactionRequest', + 'CommitRequest', + 'RollbackRequest', + 'BatchWriteRequest', + 'BatchWriteResponse', + }, +) + + +class CreateSessionRequest(proto.Message): + r"""The request for + [CreateSession][google.spanner.v1.Spanner.CreateSession]. + + Attributes: + database (str): + Required. The database in which the new + session is created. + session (google.cloud.spanner_v1.types.Session): + Required. The session to create. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + session: 'Session' = proto.Field( + proto.MESSAGE, + number=2, + message='Session', + ) + + +class BatchCreateSessionsRequest(proto.Message): + r"""The request for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + Attributes: + database (str): + Required. The database in which the new + sessions are created. + session_template (google.cloud.spanner_v1.types.Session): + Parameters to apply to each created session. + session_count (int): + Required. The number of sessions to be created in this batch + call. The API can return fewer than the requested number of + sessions. If a specific number of sessions are desired, the + client can make additional calls to ``BatchCreateSessions`` + (adjusting + [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] + as necessary). + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + session_template: 'Session' = proto.Field( + proto.MESSAGE, + number=2, + message='Session', + ) + session_count: int = proto.Field( + proto.INT32, + number=3, + ) + + +class BatchCreateSessionsResponse(proto.Message): + r"""The response for + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. + + Attributes: + session (MutableSequence[google.cloud.spanner_v1.types.Session]): + The freshly created sessions. + """ + + session: MutableSequence['Session'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Session', + ) + + +class Session(proto.Message): + r"""A session in the Cloud Spanner API. + + Attributes: + name (str): + Output only. The name of the session. This is + always system-assigned. + labels (MutableMapping[str, str]): + The labels for the session. + + - Label keys must be between 1 and 63 characters long and + must conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. 
+ - Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + - No more than 64 labels can be associated with a given + session. + + See https://goo.gl/xmQnxf for more information on and + examples of labels. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the session + is created. + approximate_last_use_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The approximate timestamp when + the session is last used. It's typically earlier + than the actual last use time. + creator_role (str): + The database role which created this session. + multiplexed (bool): + Optional. If ``true``, specifies a multiplexed session. Use + a multiplexed session for multiple, concurrent read-only + operations. Don't use them for read-write transactions, + partitioned reads, or partitioned queries. Use + [``sessions.create``][google.spanner.v1.Spanner.CreateSession] + to create multiplexed sessions. Don't use + [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions] + to create a multiplexed session. You can't delete or list + multiplexed sessions. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + approximate_last_use_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + creator_role: str = proto.Field( + proto.STRING, + number=5, + ) + multiplexed: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class GetSessionRequest(proto.Message): + r"""The request for [GetSession][google.spanner.v1.Spanner.GetSession]. + + Attributes: + name (str): + Required. The name of the session to + retrieve. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSessionsRequest(proto.Message): + r"""The request for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Attributes: + database (str): + Required. The database in which to list + sessions. + page_size (int): + Number of sessions to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.v1.ListSessionsResponse.next_page_token] + from a previous + [ListSessionsResponse][google.spanner.v1.ListSessionsResponse]. + filter (str): + An expression for filtering the results of the request. + Filter rules are case insensitive. The fields eligible for + filtering are: + + - ``labels.key`` where key is the name of a label + + Some examples of using filters are: + + - ``labels.env:*`` --> The session has the label "env". + - ``labels.env:dev`` --> The session has the label "env" and + the value of the label contains the string "dev". + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSessionsResponse(proto.Message): + r"""The response for + [ListSessions][google.spanner.v1.Spanner.ListSessions]. + + Attributes: + sessions (MutableSequence[google.cloud.spanner_v1.types.Session]): + The list of requested sessions. 
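+
+            A sketch of paging through sessions with the generated
+            client (illustrative only; ``client`` is an assumed
+            ``spanner_v1.SpannerClient`` and the database path is a
+            placeholder)::
+
+                request = spanner_v1.ListSessionsRequest(
+                    database="projects/p/instances/i/databases/d",
+                )
+                for session in client.list_sessions(request=request):
+                    print(session.name)
+
+            The returned pager follows ``next_page_token``
+            automatically.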
+ next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListSessions][google.spanner.v1.Spanner.ListSessions] call + to fetch more of the matching sessions. + """ + + @property + def raw_page(self): + return self + + sessions: MutableSequence['Session'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Session', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSessionRequest(proto.Message): + r"""The request for + [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. + + Attributes: + name (str): + Required. The name of the session to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RequestOptions(proto.Message): + r"""Common request options for various APIs. + + Attributes: + priority (google.cloud.spanner_v1.types.RequestOptions.Priority): + Priority for the request. + request_tag (str): + A per-request tag which can be applied to queries or reads, + used for statistics collection. Both ``request_tag`` and + ``transaction_tag`` can be specified for a read or query + that belongs to a transaction. This field is ignored for + requests where it's not applicable (for example, + ``CommitRequest``). Legal characters for ``request_tag`` + values are all printable characters (ASCII 32 - 126) and the + length of a request_tag is limited to 50 characters. Values + that exceed this limit are truncated. Any leading underscore + (\_) characters are removed from the string. + transaction_tag (str): + A tag used for statistics collection about this transaction. + Both ``request_tag`` and ``transaction_tag`` can be + specified for a read or query that belongs to a transaction. + The value of transaction_tag should be the same for all + requests belonging to the same transaction. If this request + doesn't belong to any transaction, ``transaction_tag`` is + ignored. Legal characters for ``transaction_tag`` values are + all printable characters (ASCII 32 - 126) and the length of + a ``transaction_tag`` is limited to 50 characters. Values + that exceed this limit are truncated. Any leading underscore + (\_) characters are removed from the string. + """ + class Priority(proto.Enum): + r"""The relative priority for requests. Note that priority isn't + applicable for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + + The priority acts as a hint to the Cloud Spanner scheduler and + doesn't guarantee priority or order of execution. For example: + + - Some parts of a write operation always execute at + ``PRIORITY_HIGH``, regardless of the specified priority. This can + cause you to see an increase in high priority workload even when + executing a low priority request. This can also potentially cause + a priority inversion where a lower priority request is fulfilled + ahead of a higher priority request. + - If a transaction contains multiple operations with different + priorities, Cloud Spanner doesn't guarantee to process the higher + priority operations first. There might be other constraints to + satisfy, such as the order of operations. + + Values: + PRIORITY_UNSPECIFIED (0): + ``PRIORITY_UNSPECIFIED`` is equivalent to ``PRIORITY_HIGH``. + PRIORITY_LOW (1): + This specifies that the request is low + priority. + PRIORITY_MEDIUM (2): + This specifies that the request is medium + priority. + PRIORITY_HIGH (3): + This specifies that the request is high + priority. 
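+
+        A sketch of a low-priority, tagged request (illustrative only;
+        ``spanner_v1`` refers to ``google.cloud.spanner_v1``)::
+
+            options = spanner_v1.RequestOptions(
+                priority=spanner_v1.RequestOptions.Priority.PRIORITY_LOW,
+                request_tag="nightly-backfill",
+            )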
+ """ + PRIORITY_UNSPECIFIED = 0 + PRIORITY_LOW = 1 + PRIORITY_MEDIUM = 2 + PRIORITY_HIGH = 3 + + priority: Priority = proto.Field( + proto.ENUM, + number=1, + enum=Priority, + ) + request_tag: str = proto.Field( + proto.STRING, + number=2, + ) + transaction_tag: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DirectedReadOptions(proto.Message): + r"""The ``DirectedReadOptions`` can be used to indicate which replicas + or regions should be used for non-transactional reads or queries. + + ``DirectedReadOptions`` can only be specified for a read-only + transaction, otherwise the API returns an ``INVALID_ARGUMENT`` + error. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.IncludeReplicas): + ``Include_replicas`` indicates the order of replicas (as + they appear in this list) to process the request. If + ``auto_failover_disabled`` is set to ``true`` and all + replicas are exhausted without finding a healthy replica, + Spanner waits for a replica in the list to become available, + requests might fail due to ``DEADLINE_EXCEEDED`` errors. + + This field is a member of `oneof`_ ``replicas``. + exclude_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.ExcludeReplicas): + ``Exclude_replicas`` indicates that specified replicas + should be excluded from serving requests. Spanner doesn't + route requests to the replicas in this list. + + This field is a member of `oneof`_ ``replicas``. + """ + + class ReplicaSelection(proto.Message): + r"""The directed read replica selector. Callers must provide one or more + of the following fields for replica selection: + + - ``location`` - The location must be one of the regions within the + multi-region configuration of your database. + - ``type`` - The type of the replica. + + Some examples of using replica_selectors are: + + - ``location:us-east1`` --> The "us-east1" replica(s) of any + available type is used to process the request. + - ``type:READ_ONLY`` --> The "READ_ONLY" type replica(s) in the + nearest available location are used to process the request. + - ``location:us-east1 type:READ_ONLY`` --> The "READ_ONLY" type + replica(s) in location "us-east1" is used to process the request. + + Attributes: + location (str): + The location or region of the serving + requests, for example, "us-east1". + type_ (google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection.Type): + The type of replica. + """ + class Type(proto.Enum): + r"""Indicates the type of replica. + + Values: + TYPE_UNSPECIFIED (0): + Not specified. + READ_WRITE (1): + Read-write replicas support both reads and + writes. + READ_ONLY (2): + Read-only replicas only support reads (not + writes). + """ + TYPE_UNSPECIFIED = 0 + READ_WRITE = 1 + READ_ONLY = 2 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'DirectedReadOptions.ReplicaSelection.Type' = proto.Field( + proto.ENUM, + number=2, + enum='DirectedReadOptions.ReplicaSelection.Type', + ) + + class IncludeReplicas(proto.Message): + r"""An ``IncludeReplicas`` contains a repeated set of + ``ReplicaSelection`` which indicates the order in which replicas + should be considered. 
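+
+        For example, a minimal sketch (illustrative only; ``spanner_v1``
+        refers to ``google.cloud.spanner_v1``) that prefers ``READ_ONLY``
+        replicas in ``us-east1``::
+
+            options = spanner_v1.DirectedReadOptions(
+                include_replicas=spanner_v1.DirectedReadOptions.IncludeReplicas(
+                    replica_selections=[
+                        spanner_v1.DirectedReadOptions.ReplicaSelection(
+                            location="us-east1",
+                            type_=spanner_v1.DirectedReadOptions.ReplicaSelection.Type.READ_ONLY,
+                        ),
+                    ],
+                ),
+            )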
+
+        Attributes:
+            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
+                The directed read replica selector.
+            auto_failover_disabled (bool):
+                If ``true``, Spanner doesn't route requests to a replica
+                outside the ``include_replicas`` list when all of the
+                specified replicas are unavailable or unhealthy. Default
+                value is ``false``.
+        """
+
+        replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='DirectedReadOptions.ReplicaSelection',
+        )
+        auto_failover_disabled: bool = proto.Field(
+            proto.BOOL,
+            number=2,
+        )
+
+    class ExcludeReplicas(proto.Message):
+        r"""An ``ExcludeReplicas`` contains a repeated set of
+        ``ReplicaSelection`` that should be excluded from serving
+        requests.
+
+        Attributes:
+            replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]):
+                The directed read replica selector.
+        """
+
+        replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='DirectedReadOptions.ReplicaSelection',
+        )
+
+    include_replicas: IncludeReplicas = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='replicas',
+        message=IncludeReplicas,
+    )
+    exclude_replicas: ExcludeReplicas = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='replicas',
+        message=ExcludeReplicas,
+    )
+
+
+class ExecuteSqlRequest(proto.Message):
+    r"""The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]
+    and
+    [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql].
+
+    Attributes:
+        session (str):
+            Required. The session in which the SQL query
+            should be performed.
+        transaction (google.cloud.spanner_v1.types.TransactionSelector):
+            The transaction to use.
+
+            For queries, if none is provided, the default is
+            a temporary read-only transaction with strong
+            concurrency.
+
+            Standard DML statements require a read-write
+            transaction. To protect against replays,
+            single-use transactions are not supported. The
+            caller must either supply an existing
+            transaction ID or begin a new transaction.
+
+            Partitioned DML requires an existing Partitioned
+            DML transaction ID.
+        sql (str):
+            Required. The SQL string.
+        params (google.protobuf.struct_pb2.Struct):
+            Parameter names and values that bind to placeholders in the
+            SQL string.
+
+            A parameter placeholder consists of the ``@`` character
+            followed by the parameter name (for example,
+            ``@firstName``). Parameter names must conform to the naming
+            requirements of identifiers as specified at
+            https://cloud.google.com/spanner/docs/lexical#identifiers.
+
+            Parameters can appear anywhere that a literal value is
+            expected. The same parameter name can be used more than
+            once, for example:
+
+            ``"WHERE id > @msg_id AND id < @msg_id + 100"``
+
+            It's an error to execute a SQL statement with unbound
+            parameters.
+        param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]):
+            It isn't always possible for Cloud Spanner to infer the
+            right SQL type from a JSON value. For example, values of
+            type ``BYTES`` and values of type ``STRING`` both appear in
+            [params][google.spanner.v1.ExecuteSqlRequest.params] as JSON
+            strings.
+
+            In these cases, you can use ``param_types`` to specify the
+            exact SQL type for some or all of the SQL statement
+            parameters. See the definition of
+            [Type][google.spanner.v1.Type] for more information about
+            SQL types.
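+
+            A sketch of binding a parameter and pinning its type
+            (illustrative only; ``session_name`` is an assumed session
+            resource name)::
+
+                from google.protobuf import struct_pb2
+
+                request = spanner_v1.ExecuteSqlRequest(
+                    session=session_name,
+                    sql="SELECT * FROM Messages WHERE id > @msg_id",
+                    params=struct_pb2.Struct(
+                        fields={"msg_id": struct_pb2.Value(string_value="1001")},
+                    ),
+                    param_types={
+                        "msg_id": spanner_v1.Type(code=spanner_v1.TypeCode.INT64),
+                    },
+                )
+
+            Note that the ``INT64`` value is bound as a JSON string,
+            which is why ``param_types`` is needed to disambiguate.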
+ resume_token (bytes): + If this request is resuming a previously interrupted SQL + statement execution, ``resume_token`` should be copied from + the last + [PartialResultSet][google.spanner.v1.PartialResultSet] + yielded before the interruption. Doing this enables the new + SQL statement execution to resume where the last one left + off. The rest of the request parameters must exactly match + the request that yielded this token. + query_mode (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode): + Used to control the amount of debugging information returned + in [ResultSetStats][google.spanner.v1.ResultSetStats]. If + [partition_token][google.spanner.v1.ExecuteSqlRequest.partition_token] + is set, + [query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] + can only be set to + [QueryMode.NORMAL][google.spanner.v1.ExecuteSqlRequest.QueryMode.NORMAL]. + partition_token (bytes): + If present, results are restricted to the specified + partition previously created using ``PartitionQuery``. There + must be an exact match for the values of fields common to + this message and the ``PartitionQueryRequest`` message used + to create this ``partition_token``. + seqno (int): + A per-transaction sequence number used to + identify this request. This field makes each + request idempotent such that if the request is + received multiple times, at most one succeeds. + + The sequence number must be monotonically + increasing within the transaction. If a request + arrives for the first time with an out-of-order + sequence number, the transaction can be aborted. + Replays of previously handled requests yield the + same response as the first execution. + + Required for DML statements. Ignored for + queries. + query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): + Query optimizer configuration to use for the + given query. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions): + Directed read options for this request. + data_boost_enabled (bool): + If this is for a partitioned query and this field is set to + ``true``, the request is executed with Spanner Data Boost + independent compute resources. + + If the field is set to ``true`` but the request doesn't set + ``partition_token``, the API returns an ``INVALID_ARGUMENT`` + error. + last_statement (bool): + Optional. If set to ``true``, this statement marks the end + of the transaction. After this statement executes, you must + commit or abort the transaction. Attempts to execute any + other requests against this transaction (including reads and + queries) are rejected. + + For DML statements, setting this option might cause some + error reporting to be deferred until commit time (for + example, validation of unique constraints). Given this, + successful execution of a DML statement shouldn't be assumed + until a subsequent ``Commit`` call completes successfully. + """ + class QueryMode(proto.Enum): + r"""Mode in which the statement must be processed. + + Values: + NORMAL (0): + The default mode. Only the statement results + are returned. + PLAN (1): + This mode returns only the query plan, + without any results or execution statistics + information. + PROFILE (2): + This mode returns the query plan, overall + execution statistics, operator level execution + statistics along with the results. This has a + performance overhead compared to the other + modes. 
It isn't recommended to use this mode for + production traffic. + WITH_STATS (3): + This mode returns the overall (but not + operator-level) execution statistics along with + the results. + WITH_PLAN_AND_STATS (4): + This mode returns the query plan, overall + (but not operator-level) execution statistics + along with the results. + """ + NORMAL = 0 + PLAN = 1 + PROFILE = 2 + WITH_STATS = 3 + WITH_PLAN_AND_STATS = 4 + + class QueryOptions(proto.Message): + r"""Query optimizer configuration. + + Attributes: + optimizer_version (str): + An option to control the selection of optimizer version. + + This parameter allows individual queries to pick different + query optimizer versions. + + Specifying ``latest`` as a value instructs Cloud Spanner to + use the latest supported query optimizer version. If not + specified, Cloud Spanner uses the optimizer version set at + the database level options. Any other positive integer (from + the list of supported optimizer versions) overrides the + default optimizer version for query execution. + + The list of supported optimizer versions can be queried from + ``SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS``. + + Executing a SQL statement with an invalid optimizer version + fails with an ``INVALID_ARGUMENT`` error. + + See + https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer + for more information on managing the query optimizer. + + The ``optimizer_version`` statement hint has precedence over + this setting. + optimizer_statistics_package (str): + An option to control the selection of optimizer statistics + package. + + This parameter allows individual queries to use a different + query optimizer statistics package. + + Specifying ``latest`` as a value instructs Cloud Spanner to + use the latest generated statistics package. If not + specified, Cloud Spanner uses the statistics package set at + the database level options, or the latest package if the + database option isn't set. + + The statistics package requested by the query has to be + exempt from garbage collection. This can be achieved with + the following DDL statement: + + .. code:: sql + + ALTER STATISTICS SET OPTIONS (allow_gc=false) + + The list of available statistics packages can be queried + from ``INFORMATION_SCHEMA.SPANNER_STATISTICS``. + + Executing a SQL statement with an invalid optimizer + statistics package or with a statistics package that allows + garbage collection fails with an ``INVALID_ARGUMENT`` error. 
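+
+                A sketch of pinning the optimizer version for a single
+                request (illustrative only; ``session_name`` is
+                assumed)::
+
+                    request = spanner_v1.ExecuteSqlRequest(
+                        session=session_name,
+                        sql="SELECT 1",
+                        query_options=spanner_v1.ExecuteSqlRequest.QueryOptions(
+                            optimizer_version="latest",
+                        ),
+                    )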
+ """ + + optimizer_version: str = proto.Field( + proto.STRING, + number=1, + ) + optimizer_statistics_package: str = proto.Field( + proto.STRING, + number=2, + ) + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + sql: str = proto.Field( + proto.STRING, + number=3, + ) + params: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Struct, + ) + param_types: MutableMapping[str, gs_type.Type] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=5, + message=gs_type.Type, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=6, + ) + query_mode: QueryMode = proto.Field( + proto.ENUM, + number=7, + enum=QueryMode, + ) + partition_token: bytes = proto.Field( + proto.BYTES, + number=8, + ) + seqno: int = proto.Field( + proto.INT64, + number=9, + ) + query_options: QueryOptions = proto.Field( + proto.MESSAGE, + number=10, + message=QueryOptions, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=11, + message='RequestOptions', + ) + directed_read_options: 'DirectedReadOptions' = proto.Field( + proto.MESSAGE, + number=15, + message='DirectedReadOptions', + ) + data_boost_enabled: bool = proto.Field( + proto.BOOL, + number=16, + ) + last_statement: bool = proto.Field( + proto.BOOL, + number=17, + ) + + +class ExecuteBatchDmlRequest(proto.Message): + r"""The request for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + + Attributes: + session (str): + Required. The session in which the DML + statements should be performed. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + Required. The transaction to use. Must be a + read-write transaction. + To protect against replays, single-use + transactions are not supported. The caller must + either supply an existing transaction ID or + begin a new transaction. + statements (MutableSequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement]): + Required. The list of statements to execute in this batch. + Statements are executed serially, such that the effects of + statement ``i`` are visible to statement ``i+1``. Each + statement must be a DML statement. Execution stops at the + first failed statement; the remaining statements are not + executed. + + Callers must provide at least one statement. + seqno (int): + Required. A per-transaction sequence number + used to identify this request. This field makes + each request idempotent such that if the request + is received multiple times, at most one + succeeds. + + The sequence number must be monotonically + increasing within the transaction. If a request + arrives for the first time with an out-of-order + sequence number, the transaction might be + aborted. Replays of previously handled requests + yield the same response as the first execution. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + last_statements (bool): + Optional. If set to ``true``, this request marks the end of + the transaction. After these statements execute, you must + commit or abort the transaction. Attempts to execute any + other requests against this transaction (including reads and + queries) are rejected. + + Setting this option might cause some error reporting to be + deferred until commit time (for example, validation of + unique constraints). 
Given this, successful execution of + statements shouldn't be assumed until a subsequent + ``Commit`` call completes successfully. + """ + + class Statement(proto.Message): + r"""A single DML statement. + + Attributes: + sql (str): + Required. The DML string. + params (google.protobuf.struct_pb2.Struct): + Parameter names and values that bind to placeholders in the + DML string. + + A parameter placeholder consists of the ``@`` character + followed by the parameter name (for example, + ``@firstName``). Parameter names can contain letters, + numbers, and underscores. + + Parameters can appear anywhere that a literal value is + expected. The same parameter name can be used more than + once, for example: + + ``"WHERE id > @msg_id AND id < @msg_id + 100"`` + + It's an error to execute a SQL statement with unbound + parameters. + param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]): + It isn't always possible for Cloud Spanner to infer the + right SQL type from a JSON value. For example, values of + type ``BYTES`` and values of type ``STRING`` both appear in + [params][google.spanner.v1.ExecuteBatchDmlRequest.Statement.params] + as JSON strings. + + In these cases, ``param_types`` can be used to specify the + exact SQL type for some or all of the SQL statement + parameters. See the definition of + [Type][google.spanner.v1.Type] for more information about + SQL types. + """ + + sql: str = proto.Field( + proto.STRING, + number=1, + ) + params: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + param_types: MutableMapping[str, gs_type.Type] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message=gs_type.Type, + ) + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + statements: MutableSequence[Statement] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Statement, + ) + seqno: int = proto.Field( + proto.INT64, + number=4, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=5, + message='RequestOptions', + ) + last_statements: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class ExecuteBatchDmlResponse(proto.Message): + r"""The response for + [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. + Contains a list of [ResultSet][google.spanner.v1.ResultSet] + messages, one for each DML statement that has successfully executed, + in the same order as the statements in the request. If a statement + fails, the status in the response body identifies the cause of the + failure. + + To check for DML statements that failed, use the following approach: + + 1. Check the status in the response message. The + [google.rpc.Code][google.rpc.Code] enum value ``OK`` indicates + that all statements were executed successfully. + 2. If the status was not ``OK``, check the number of result sets in + the response. If the response contains ``N`` + [ResultSet][google.spanner.v1.ResultSet] messages, then statement + ``N+1`` in the request failed. + + Example 1: + + - Request: 5 DML statements, all executed successfully. + - Response: 5 [ResultSet][google.spanner.v1.ResultSet] messages, + with the status ``OK``. + + Example 2: + + - Request: 5 DML statements. The third statement has a syntax error. + - Response: 2 [ResultSet][google.spanner.v1.ResultSet] messages, and + a syntax error (``INVALID_ARGUMENT``) status. 
The number of
+    [ResultSet][google.spanner.v1.ResultSet] messages indicates that
+    the third statement failed, and the fourth and fifth statements
+    were not executed.
+
+    Attributes:
+        result_sets (MutableSequence[google.cloud.spanner_v1.types.ResultSet]):
+            One [ResultSet][google.spanner.v1.ResultSet] for each
+            statement in the request that ran successfully, in the same
+            order as the statements in the request. Each
+            [ResultSet][google.spanner.v1.ResultSet] does not contain
+            any rows. The
+            [ResultSetStats][google.spanner.v1.ResultSetStats] in each
+            [ResultSet][google.spanner.v1.ResultSet] contain the number
+            of rows modified by the statement.
+
+            Only the first [ResultSet][google.spanner.v1.ResultSet] in
+            the response contains valid
+            [ResultSetMetadata][google.spanner.v1.ResultSetMetadata].
+        status (google.rpc.status_pb2.Status):
+            If all DML statements are executed successfully, the status
+            is ``OK``. Otherwise, the error status of the first failed
+            statement.
+        precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken):
+            Optional. A precommit token is included if the read-write
+            transaction is on a multiplexed session. The precommit token
+            with the highest sequence number from this transaction
+            attempt should be passed to the
+            [Commit][google.spanner.v1.Spanner.Commit] request for this
+            transaction.
+    """
+
+    result_sets: MutableSequence[result_set.ResultSet] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=result_set.ResultSet,
+    )
+    status: status_pb2.Status = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=status_pb2.Status,
+    )
+    precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=gs_transaction.MultiplexedSessionPrecommitToken,
+    )
+
+
+class PartitionOptions(proto.Message):
+    r"""Options for a ``PartitionQueryRequest`` and
+    ``PartitionReadRequest``.
+
+    Attributes:
+        partition_size_bytes (int):
+            **Note:** This hint is currently ignored by
+            ``PartitionQuery`` and ``PartitionRead`` requests.
+
+            The desired data size for each partition generated. The
+            default for this option is currently 1 GiB. This is only a
+            hint. The actual size of each partition can be smaller or
+            larger than this size request.
+        max_partitions (int):
+            **Note:** This hint is currently ignored by
+            ``PartitionQuery`` and ``PartitionRead`` requests.
+
+            The desired maximum number of partitions to return. For
+            example, this might be set to the number of workers
+            available. The default for this option is currently 10,000.
+            The maximum value is currently 200,000. This is only a hint.
+            The actual number of partitions returned can be smaller or
+            larger than this maximum count request.
+    """
+
+    partition_size_bytes: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    max_partitions: int = proto.Field(
+        proto.INT64,
+        number=2,
+    )
+
+
+class PartitionQueryRequest(proto.Message):
+    r"""The request for
+    [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery].
+
+    Attributes:
+        session (str):
+            Required. The session used to create the
+            partitions.
+        transaction (google.cloud.spanner_v1.types.TransactionSelector):
+            Read-only snapshot transactions are supported;
+            read-write and single-use transactions are not.
+        sql (str):
+            Required. The query request to generate partitions for. The
+            request fails if the query isn't root partitionable. For a
+            query to be root partitionable, it needs to satisfy a few
+            conditions.
+            For example, if the query execution plan
+            contains a distributed union operator, then it must be the
+            first operator in the plan. For more information about other
+            conditions, see `Read data in
+            parallel `__.
+
+            The query request must not contain DML commands, such as
+            ``INSERT``, ``UPDATE``, or ``DELETE``. Use
+            [``ExecuteStreamingSql``][google.spanner.v1.Spanner.ExecuteStreamingSql]
+            with a ``PartitionedDml`` transaction for large,
+            partition-friendly DML operations.
+        params (google.protobuf.struct_pb2.Struct):
+            Parameter names and values that bind to placeholders in the
+            SQL string.
+
+            A parameter placeholder consists of the ``@`` character
+            followed by the parameter name (for example,
+            ``@firstName``). Parameter names can contain letters,
+            numbers, and underscores.
+
+            Parameters can appear anywhere that a literal value is
+            expected. The same parameter name can be used more than
+            once, for example:
+
+            ``"WHERE id > @msg_id AND id < @msg_id + 100"``
+
+            It's an error to execute a SQL statement with unbound
+            parameters.
+        param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]):
+            It isn't always possible for Cloud Spanner to infer the
+            right SQL type from a JSON value. For example, values of
+            type ``BYTES`` and values of type ``STRING`` both appear in
+            [params][google.spanner.v1.PartitionQueryRequest.params] as
+            JSON strings.
+
+            In these cases, ``param_types`` can be used to specify the
+            exact SQL type for some or all of the SQL query parameters.
+            See the definition of [Type][google.spanner.v1.Type] for
+            more information about SQL types.
+        partition_options (google.cloud.spanner_v1.types.PartitionOptions):
+            Additional options that affect how many
+            partitions are created.
+    """
+
+    session: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    transaction: gs_transaction.TransactionSelector = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=gs_transaction.TransactionSelector,
+    )
+    sql: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    params: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=struct_pb2.Struct,
+    )
+    param_types: MutableMapping[str, gs_type.Type] = proto.MapField(
+        proto.STRING,
+        proto.MESSAGE,
+        number=5,
+        message=gs_type.Type,
+    )
+    partition_options: 'PartitionOptions' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='PartitionOptions',
+    )
+
+
+class PartitionReadRequest(proto.Message):
+    r"""The request for
+    [PartitionRead][google.spanner.v1.Spanner.PartitionRead].
+
+    Attributes:
+        session (str):
+            Required. The session used to create the
+            partitions.
+        transaction (google.cloud.spanner_v1.types.TransactionSelector):
+            Read-only snapshot transactions are supported;
+            read-write and single-use transactions are not.
+        table (str):
+            Required. The name of the table in the
+            database to be read.
+        index (str):
+            If non-empty, the name of an index on
+            [table][google.spanner.v1.PartitionReadRequest.table]. This
+            index is used instead of the table primary key when
+            interpreting
+            [key_set][google.spanner.v1.PartitionReadRequest.key_set]
+            and sorting result rows. See
+            [key_set][google.spanner.v1.PartitionReadRequest.key_set]
+            for further information.
+        columns (MutableSequence[str]):
+            The columns of
+            [table][google.spanner.v1.PartitionReadRequest.table] to be
+            returned for each row matching this request.
+        key_set (google.cloud.spanner_v1.types.KeySet):
+            Required. ``key_set`` identifies the rows to be yielded.
+ ``key_set`` names the primary keys of the rows in + [table][google.spanner.v1.PartitionReadRequest.table] to be + yielded, unless + [index][google.spanner.v1.PartitionReadRequest.index] is + present. If + [index][google.spanner.v1.PartitionReadRequest.index] is + present, then + [key_set][google.spanner.v1.PartitionReadRequest.key_set] + instead names index keys in + [index][google.spanner.v1.PartitionReadRequest.index]. + + It isn't an error for the ``key_set`` to name rows that + don't exist in the database. Read yields nothing for + nonexistent rows. + partition_options (google.cloud.spanner_v1.types.PartitionOptions): + Additional options that affect how many + partitions are created. + """ + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + table: str = proto.Field( + proto.STRING, + number=3, + ) + index: str = proto.Field( + proto.STRING, + number=4, + ) + columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + key_set: keys.KeySet = proto.Field( + proto.MESSAGE, + number=6, + message=keys.KeySet, + ) + partition_options: 'PartitionOptions' = proto.Field( + proto.MESSAGE, + number=9, + message='PartitionOptions', + ) + + +class Partition(proto.Message): + r"""Information returned for each partition returned in a + PartitionResponse. + + Attributes: + partition_token (bytes): + This token can be passed to ``Read``, ``StreamingRead``, + ``ExecuteSql``, or ``ExecuteStreamingSql`` requests to + restrict the results to those identified by this partition + token. + """ + + partition_token: bytes = proto.Field( + proto.BYTES, + number=1, + ) + + +class PartitionResponse(proto.Message): + r"""The response for + [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] or + [PartitionRead][google.spanner.v1.Spanner.PartitionRead] + + Attributes: + partitions (MutableSequence[google.cloud.spanner_v1.types.Partition]): + Partitions created by this request. + transaction (google.cloud.spanner_v1.types.Transaction): + Transaction created by this request. + """ + + partitions: MutableSequence['Partition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Partition', + ) + transaction: gs_transaction.Transaction = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.Transaction, + ) + + +class ReadRequest(proto.Message): + r"""The request for [Read][google.spanner.v1.Spanner.Read] and + [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. + + Attributes: + session (str): + Required. The session in which the read + should be performed. + transaction (google.cloud.spanner_v1.types.TransactionSelector): + The transaction to use. If none is provided, + the default is a temporary read-only transaction + with strong concurrency. + table (str): + Required. The name of the table in the + database to be read. + index (str): + If non-empty, the name of an index on + [table][google.spanner.v1.ReadRequest.table]. This index is + used instead of the table primary key when interpreting + [key_set][google.spanner.v1.ReadRequest.key_set] and sorting + result rows. See + [key_set][google.spanner.v1.ReadRequest.key_set] for further + information. + columns (MutableSequence[str]): + Required. The columns of + [table][google.spanner.v1.ReadRequest.table] to be returned + for each row matching this request. + key_set (google.cloud.spanner_v1.types.KeySet): + Required. 
``key_set`` identifies the rows to be yielded. + ``key_set`` names the primary keys of the rows in + [table][google.spanner.v1.ReadRequest.table] to be yielded, + unless [index][google.spanner.v1.ReadRequest.index] is + present. If [index][google.spanner.v1.ReadRequest.index] is + present, then + [key_set][google.spanner.v1.ReadRequest.key_set] instead + names index keys in + [index][google.spanner.v1.ReadRequest.index]. + + If the + [partition_token][google.spanner.v1.ReadRequest.partition_token] + field is empty, rows are yielded in table primary key order + (if [index][google.spanner.v1.ReadRequest.index] is empty) + or index key order (if + [index][google.spanner.v1.ReadRequest.index] is non-empty). + If the + [partition_token][google.spanner.v1.ReadRequest.partition_token] + field isn't empty, rows are yielded in an unspecified order. + + It isn't an error for the ``key_set`` to name rows that + don't exist in the database. Read yields nothing for + nonexistent rows. + limit (int): + If greater than zero, only the first ``limit`` rows are + yielded. If ``limit`` is zero, the default is no limit. A + limit can't be specified if ``partition_token`` is set. + resume_token (bytes): + If this request is resuming a previously interrupted read, + ``resume_token`` should be copied from the last + [PartialResultSet][google.spanner.v1.PartialResultSet] + yielded before the interruption. Doing this enables the new + read to resume where the last read left off. The rest of the + request parameters must exactly match the request that + yielded this token. + partition_token (bytes): + If present, results are restricted to the specified + partition previously created using ``PartitionRead``. There + must be an exact match for the values of fields common to + this message and the PartitionReadRequest message used to + create this partition_token. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions): + Directed read options for this request. + data_boost_enabled (bool): + If this is for a partitioned read and this field is set to + ``true``, the request is executed with Spanner Data Boost + independent compute resources. + + If the field is set to ``true`` but the request doesn't set + ``partition_token``, the API returns an ``INVALID_ARGUMENT`` + error. + order_by (google.cloud.spanner_v1.types.ReadRequest.OrderBy): + Optional. Order for the returned rows. + + By default, Spanner returns result rows in primary key order + except for PartitionRead requests. For applications that + don't require rows to be returned in primary key + (``ORDER_BY_PRIMARY_KEY``) order, setting + ``ORDER_BY_NO_ORDER`` option allows Spanner to optimize row + retrieval, resulting in lower latencies in certain cases + (for example, bulk point lookups). + lock_hint (google.cloud.spanner_v1.types.ReadRequest.LockHint): + Optional. Lock Hint for the request, it can + only be used with read-write transactions. + """ + class OrderBy(proto.Enum): + r"""An option to control the order in which rows are returned + from a read. + + Values: + ORDER_BY_UNSPECIFIED (0): + Default value. + + ``ORDER_BY_UNSPECIFIED`` is equivalent to + ``ORDER_BY_PRIMARY_KEY``. + ORDER_BY_PRIMARY_KEY (1): + Read rows are returned in primary key order. + + In the event that this option is used in conjunction with + the ``partition_token`` field, the API returns an + ``INVALID_ARGUMENT`` error. 
+ ORDER_BY_NO_ORDER (2): + Read rows are returned in any order. + """ + ORDER_BY_UNSPECIFIED = 0 + ORDER_BY_PRIMARY_KEY = 1 + ORDER_BY_NO_ORDER = 2 + + class LockHint(proto.Enum): + r"""A lock hint mechanism for reads done within a transaction. + + Values: + LOCK_HINT_UNSPECIFIED (0): + Default value. + + ``LOCK_HINT_UNSPECIFIED`` is equivalent to + ``LOCK_HINT_SHARED``. + LOCK_HINT_SHARED (1): + Acquire shared locks. + + By default when you perform a read as part of a read-write + transaction, Spanner acquires shared read locks, which + allows other reads to still access the data until your + transaction is ready to commit. When your transaction is + committing and writes are being applied, the transaction + attempts to upgrade to an exclusive lock for any data you + are writing. For more information about locks, see `Lock + modes `__. + LOCK_HINT_EXCLUSIVE (2): + Acquire exclusive locks. + + Requesting exclusive locks is beneficial if you observe high + write contention, which means you notice that multiple + transactions are concurrently trying to read and write to + the same data, resulting in a large number of aborts. This + problem occurs when two transactions initially acquire + shared locks and then both try to upgrade to exclusive locks + at the same time. In this situation both transactions are + waiting for the other to give up their lock, resulting in a + deadlocked situation. Spanner is able to detect this + occurring and force one of the transactions to abort. + However, this is a slow and expensive operation and results + in lower performance. In this case it makes sense to acquire + exclusive locks at the start of the transaction because then + when multiple transactions try to act on the same data, they + automatically get serialized. Each transaction waits its + turn to acquire the lock and avoids getting into deadlock + situations. + + Because the exclusive lock hint is just a hint, it shouldn't + be considered equivalent to a mutex. In other words, you + shouldn't use Spanner exclusive locks as a mutual exclusion + mechanism for the execution of code outside of Spanner. + + **Note:** Request exclusive locks judiciously because they + block others from reading that data for the entire + transaction, rather than just when the writes are being + performed. Unless you observe high write contention, you + should use the default of shared read locks so you don't + prematurely block other clients from reading the data that + you're writing to. 
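+
+        A sketch of requesting exclusive locks for a read inside a
+        read-write transaction (illustrative only; ``session_name`` and
+        ``txn_id`` are assumed)::
+
+            request = spanner_v1.ReadRequest(
+                session=session_name,
+                transaction=spanner_v1.TransactionSelector(id=txn_id),
+                table="Albums",
+                columns=["AlbumId", "Stock"],
+                key_set=spanner_v1.KeySet(all_=True),
+                lock_hint=spanner_v1.ReadRequest.LockHint.LOCK_HINT_EXCLUSIVE,
+            )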
+ """ + LOCK_HINT_UNSPECIFIED = 0 + LOCK_HINT_SHARED = 1 + LOCK_HINT_EXCLUSIVE = 2 + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction: gs_transaction.TransactionSelector = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionSelector, + ) + table: str = proto.Field( + proto.STRING, + number=3, + ) + index: str = proto.Field( + proto.STRING, + number=4, + ) + columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + key_set: keys.KeySet = proto.Field( + proto.MESSAGE, + number=6, + message=keys.KeySet, + ) + limit: int = proto.Field( + proto.INT64, + number=8, + ) + resume_token: bytes = proto.Field( + proto.BYTES, + number=9, + ) + partition_token: bytes = proto.Field( + proto.BYTES, + number=10, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=11, + message='RequestOptions', + ) + directed_read_options: 'DirectedReadOptions' = proto.Field( + proto.MESSAGE, + number=14, + message='DirectedReadOptions', + ) + data_boost_enabled: bool = proto.Field( + proto.BOOL, + number=15, + ) + order_by: OrderBy = proto.Field( + proto.ENUM, + number=16, + enum=OrderBy, + ) + lock_hint: LockHint = proto.Field( + proto.ENUM, + number=17, + enum=LockHint, + ) + + +class BeginTransactionRequest(proto.Message): + r"""The request for + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. + + Attributes: + session (str): + Required. The session in which the + transaction runs. + options (google.cloud.spanner_v1.types.TransactionOptions): + Required. Options for the new transaction. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. Priority is ignored for + this request. Setting the priority in this + ``request_options`` struct doesn't do anything. To set the + priority for a transaction, set it on the reads and writes + that are part of this transaction instead. + mutation_key (google.cloud.spanner_v1.types.Mutation): + Optional. Required for read-write + transactions on a multiplexed session that + commit mutations but don't perform any reads or + queries. You must randomly select one of the + mutations from the mutation set and send it as a + part of this request. + """ + + session: str = proto.Field( + proto.STRING, + number=1, + ) + options: gs_transaction.TransactionOptions = proto.Field( + proto.MESSAGE, + number=2, + message=gs_transaction.TransactionOptions, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='RequestOptions', + ) + mutation_key: mutation.Mutation = proto.Field( + proto.MESSAGE, + number=4, + message=mutation.Mutation, + ) + + +class CommitRequest(proto.Message): + r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + session (str): + Required. The session in which the + transaction to be committed is running. + transaction_id (bytes): + Commit a previously-started transaction. + + This field is a member of `oneof`_ ``transaction``. + single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): + Execute mutations in a temporary transaction. 
Note that + unlike commit of a previously-started transaction, commit + with a temporary transaction is non-idempotent. That is, if + the ``CommitRequest`` is sent to Cloud Spanner more than + once (for instance, due to retries in the application, or in + the transport library), it's possible that the mutations are + executed more than once. If this is undesirable, use + [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] + and [Commit][google.spanner.v1.Spanner.Commit] instead. + + This field is a member of `oneof`_ ``transaction``. + mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): + The mutations to be executed when this + transaction commits. All mutations are applied + atomically, in the order they appear in this + list. + return_commit_stats (bool): + If ``true``, then statistics related to the transaction is + included in the + [CommitResponse][google.spanner.v1.CommitResponse.commit_stats]. + Default value is ``false``. + max_commit_delay (google.protobuf.duration_pb2.Duration): + Optional. The amount of latency this request + is configured to incur in order to improve + throughput. If this field isn't set, Spanner + assumes requests are relatively latency + sensitive and automatically determines an + appropriate delay time. You can specify a commit + delay value between 0 and 500 ms. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. + precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + Optional. If the read-write transaction was executed on a + multiplexed session, then you must include the precommit + token with the highest sequence number received in this + transaction attempt. Failing to do so results in a + ``FailedPrecondition`` error. + """ + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction_id: bytes = proto.Field( + proto.BYTES, + number=2, + oneof='transaction', + ) + single_use_transaction: gs_transaction.TransactionOptions = proto.Field( + proto.MESSAGE, + number=3, + oneof='transaction', + message=gs_transaction.TransactionOptions, + ) + mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=mutation.Mutation, + ) + return_commit_stats: bool = proto.Field( + proto.BOOL, + number=5, + ) + max_commit_delay: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=6, + message='RequestOptions', + ) + precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( + proto.MESSAGE, + number=9, + message=gs_transaction.MultiplexedSessionPrecommitToken, + ) + + +class RollbackRequest(proto.Message): + r"""The request for [Rollback][google.spanner.v1.Spanner.Rollback]. + + Attributes: + session (str): + Required. The session in which the + transaction to roll back is running. + transaction_id (bytes): + Required. The transaction to roll back. + """ + + session: str = proto.Field( + proto.STRING, + number=1, + ) + transaction_id: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class BatchWriteRequest(proto.Message): + r"""The request for [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. + + Attributes: + session (str): + Required. The session in which the batch + request is to be run. + request_options (google.cloud.spanner_v1.types.RequestOptions): + Common options for this request. 
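+
+            For context, a complete ``BatchWriteRequest`` might be
+            built as sketched below (illustrative only; the remaining
+            fields are described next, and ``session_name`` is
+            assumed)::
+
+                request = spanner_v1.BatchWriteRequest(
+                    session=session_name,
+                    mutation_groups=[
+                        spanner_v1.BatchWriteRequest.MutationGroup(
+                            mutations=[
+                                spanner_v1.Mutation(
+                                    insert_or_update=spanner_v1.Mutation.Write(
+                                        table="Singers",
+                                        columns=["SingerId", "Name"],
+                                        values=[["1", "Marc"]],
+                                    ),
+                                ),
+                            ],
+                        ),
+                    ],
+                )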
+ mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): + Required. The groups of mutations to be + applied. + exclude_txn_from_change_streams (bool): + Optional. If you don't set the + ``exclude_txn_from_change_streams`` option or if it's set to + ``false``, then any change streams monitoring columns + modified by transactions will capture the updates made + within that transaction. + """ + + class MutationGroup(proto.Message): + r"""A group of mutations to be committed together. Related + mutations should be placed in a group. For example, two + mutations inserting rows with the same primary key prefix in + both parent and child tables are related. + + Attributes: + mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): + Required. The mutations in this group. + """ + + mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=mutation.Mutation, + ) + + session: str = proto.Field( + proto.STRING, + number=1, + ) + request_options: 'RequestOptions' = proto.Field( + proto.MESSAGE, + number=3, + message='RequestOptions', + ) + mutation_groups: MutableSequence[MutationGroup] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=MutationGroup, + ) + exclude_txn_from_change_streams: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class BatchWriteResponse(proto.Message): + r"""The result of applying a batch of mutations. + + Attributes: + indexes (MutableSequence[int]): + The mutation groups applied in this batch. The values index + into the ``mutation_groups`` field in the corresponding + ``BatchWriteRequest``. + status (google.rpc.status_pb2.Status): + An ``OK`` status indicates success. Any other status + indicates a failure. + commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): + The commit timestamp of the transaction that applied this + batch. Present if ``status`` is ``OK``, absent otherwise. + """ + + indexes: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=1, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + commit_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py new file mode 100644 index 0000000000..5e13285565 --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py @@ -0,0 +1,491 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'TransactionOptions', + 'Transaction', + 'TransactionSelector', + 'MultiplexedSessionPrecommitToken', + }, +) + + +class TransactionOptions(proto.Message): + r"""Options to use for transactions. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + read_write (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite): + Transaction may write. + + Authorization to begin a read-write transaction requires + ``spanner.databases.beginOrRollbackReadWriteTransaction`` + permission on the ``session`` resource. + + This field is a member of `oneof`_ ``mode``. + partitioned_dml (google.cloud.spanner_v1.types.TransactionOptions.PartitionedDml): + Partitioned DML transaction. + + Authorization to begin a Partitioned DML transaction + requires + ``spanner.databases.beginPartitionedDmlTransaction`` + permission on the ``session`` resource. + + This field is a member of `oneof`_ ``mode``. + read_only (google.cloud.spanner_v1.types.TransactionOptions.ReadOnly): + Transaction does not write. + + Authorization to begin a read-only transaction requires + ``spanner.databases.beginReadOnlyTransaction`` permission on + the ``session`` resource. + + This field is a member of `oneof`_ ``mode``. + exclude_txn_from_change_streams (bool): + When ``exclude_txn_from_change_streams`` is set to ``true``, + it prevents read or write transactions from being tracked in + change streams. + + - If the DDL option ``allow_txn_exclusion`` is set to + ``true``, then the updates made within this transaction + aren't recorded in the change stream. + + - If you don't set the DDL option ``allow_txn_exclusion`` or + if it's set to ``false``, then the updates made within + this transaction are recorded in the change stream. + + When ``exclude_txn_from_change_streams`` is set to ``false`` + or not set, modifications from this transaction are recorded + in all change streams that are tracking columns modified by + these transactions. + + The ``exclude_txn_from_change_streams`` option can only be + specified for read-write or partitioned DML transactions, + otherwise the API returns an ``INVALID_ARGUMENT`` error. + isolation_level (google.cloud.spanner_v1.types.TransactionOptions.IsolationLevel): + Isolation level for the transaction. + """ + class IsolationLevel(proto.Enum): + r"""``IsolationLevel`` is used when setting ``isolation_level`` for a + transaction. + + Values: + ISOLATION_LEVEL_UNSPECIFIED (0): + Default value. + + If the value is not specified, the ``SERIALIZABLE`` + isolation level is used. + SERIALIZABLE (1): + All transactions appear as if they executed in a serial + order, even if some of the reads, writes, and other + operations of distinct transactions actually occurred in + parallel. Spanner assigns commit timestamps that reflect the + order of committed transactions to implement this property. + Spanner offers a stronger guarantee than serializability + called external consistency. 
For more information, see
+                `TrueTime and external
+                consistency <https://cloud.google.com/spanner/docs/true-time-external-consistency>`__.
+            REPEATABLE_READ (2):
+                All reads performed during the transaction observe a
+                consistent snapshot of the database, and the transaction is
+                only successfully committed in the absence of conflicts
+                between its updates and any concurrent updates that have
+                occurred since that snapshot. Consequently, in contrast to
+                ``SERIALIZABLE`` transactions, only write-write conflicts
+                are detected in snapshot transactions.
+
+                This isolation level does not support read-only and
+                Partitioned DML transactions.
+
+                When ``REPEATABLE_READ`` is specified on a read-write
+                transaction, the locking semantics default to
+                ``OPTIMISTIC``.
+        """
+        ISOLATION_LEVEL_UNSPECIFIED = 0
+        SERIALIZABLE = 1
+        REPEATABLE_READ = 2
+
+    class ReadWrite(proto.Message):
+        r"""Message type to initiate a read-write transaction. Currently
+        this transaction type has no options.
+
+        Attributes:
+            read_lock_mode (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite.ReadLockMode):
+                Read lock mode for the transaction.
+            multiplexed_session_previous_transaction_id (bytes):
+                Optional. Clients should pass the transaction
+                ID of the previous transaction attempt that was
+                aborted if this transaction is being executed on
+                a multiplexed session.
+        """
+        class ReadLockMode(proto.Enum):
+            r"""``ReadLockMode`` is used to set the read lock mode for read-write
+            transactions.
+
+            Values:
+                READ_LOCK_MODE_UNSPECIFIED (0):
+                    Default value.
+
+                    - If isolation level is
+                      [REPEATABLE_READ][google.spanner.v1.TransactionOptions.IsolationLevel.REPEATABLE_READ],
+                      then it is an error to specify ``read_lock_mode``. Locking
+                      semantics default to ``OPTIMISTIC``. No validation checks
+                      are done for reads, except to validate that the data that
+                      was served at the snapshot time is unchanged at commit
+                      time in the following cases:
+
+                      1. reads done as part of queries that use
+                         ``SELECT FOR UPDATE``
+                      2. reads done as part of statements with a
+                         ``LOCK_SCANNED_RANGES`` hint
+                      3. reads done as part of DML statements
+
+                    - At all other isolation levels, if ``read_lock_mode`` is
+                      the default value, then pessimistic read locks are used.
+                PESSIMISTIC (1):
+                    Pessimistic lock mode.
+
+                    Read locks are acquired immediately on read. The semantics
+                    described here apply only to
+                    [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE]
+                    isolation.
+                OPTIMISTIC (2):
+                    Optimistic lock mode.
+
+                    Locks for reads within the transaction are not acquired on
+                    read. Instead the locks are acquired on a commit to validate
+                    that read/queried data has not changed since the transaction
+                    started. The semantics described here apply only to
+                    [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE]
+                    isolation.
+            """
+            READ_LOCK_MODE_UNSPECIFIED = 0
+            PESSIMISTIC = 1
+            OPTIMISTIC = 2
+
+        read_lock_mode: 'TransactionOptions.ReadWrite.ReadLockMode' = proto.Field(
+            proto.ENUM,
+            number=1,
+            enum='TransactionOptions.ReadWrite.ReadLockMode',
+        )
+        multiplexed_session_previous_transaction_id: bytes = proto.Field(
+            proto.BYTES,
+            number=2,
+        )
+
+    class PartitionedDml(proto.Message):
+        r"""Message type to initiate a Partitioned DML transaction.
+        """
+
+    class ReadOnly(proto.Message):
+        r"""Message type to initiate a read-only transaction.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        ..
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + strong (bool): + Read at a timestamp where all previously + committed transactions are visible. + + This field is a member of `oneof`_ ``timestamp_bound``. + min_read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Executes all reads at a timestamp >= ``min_read_timestamp``. + + This is useful for requesting fresher data than some + previous read, or data that is fresh enough to observe the + effects of some previously committed transaction whose + timestamp is known. + + Note that this option can only be used in single-use + transactions. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + + This field is a member of `oneof`_ ``timestamp_bound``. + max_staleness (google.protobuf.duration_pb2.Duration): + Read data at a timestamp >= ``NOW - max_staleness`` seconds. + Guarantees that all writes that have committed more than the + specified number of seconds ago are visible. Because Cloud + Spanner chooses the exact timestamp, this mode works even if + the client's local clock is substantially skewed from Cloud + Spanner commit timestamps. + + Useful for reading the freshest data available at a nearby + replica, while bounding the possible staleness if the local + replica has fallen behind. + + Note that this option can only be used in single-use + transactions. + + This field is a member of `oneof`_ ``timestamp_bound``. + read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Executes all reads at the given timestamp. Unlike other + modes, reads at a specific timestamp are repeatable; the + same read at the same timestamp always returns the same + data. If the timestamp is in the future, the read is blocked + until the specified timestamp, modulo the read's deadline. + + Useful for large scale consistent reads such as mapreduces, + or for coordinating many reads against a consistent snapshot + of the data. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + + This field is a member of `oneof`_ ``timestamp_bound``. + exact_staleness (google.protobuf.duration_pb2.Duration): + Executes all reads at a timestamp that is + ``exact_staleness`` old. The timestamp is chosen soon after + the read is started. + + Guarantees that all writes that have committed more than the + specified number of seconds ago are visible. Because Cloud + Spanner chooses the exact timestamp, this mode works even if + the client's local clock is substantially skewed from Cloud + Spanner commit timestamps. + + Useful for reading at nearby replicas without the + distributed timestamp negotiation overhead of + ``max_staleness``. + + This field is a member of `oneof`_ ``timestamp_bound``. + return_read_timestamp (bool): + If true, the Cloud Spanner-selected read timestamp is + included in the [Transaction][google.spanner.v1.Transaction] + message that describes the transaction. 
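+
+        Example (an illustrative sketch, not prescriptive; it assumes
+        ``TransactionOptions`` has been imported from
+        ``google.cloud.spanner_v1``). A bounded-staleness read-only
+        transaction that also reports its chosen read timestamp could
+        be expressed as::
+
+            from google.protobuf import duration_pb2
+
+            # Read data at most 15 seconds stale; one oneof member only.
+            options = TransactionOptions(
+                read_only=TransactionOptions.ReadOnly(
+                    exact_staleness=duration_pb2.Duration(seconds=15),
+                    return_read_timestamp=True,
+                ),
+            )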
+ """ + + strong: bool = proto.Field( + proto.BOOL, + number=1, + oneof='timestamp_bound', + ) + min_read_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof='timestamp_bound', + message=timestamp_pb2.Timestamp, + ) + max_staleness: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + oneof='timestamp_bound', + message=duration_pb2.Duration, + ) + read_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + oneof='timestamp_bound', + message=timestamp_pb2.Timestamp, + ) + exact_staleness: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=5, + oneof='timestamp_bound', + message=duration_pb2.Duration, + ) + return_read_timestamp: bool = proto.Field( + proto.BOOL, + number=6, + ) + + read_write: ReadWrite = proto.Field( + proto.MESSAGE, + number=1, + oneof='mode', + message=ReadWrite, + ) + partitioned_dml: PartitionedDml = proto.Field( + proto.MESSAGE, + number=3, + oneof='mode', + message=PartitionedDml, + ) + read_only: ReadOnly = proto.Field( + proto.MESSAGE, + number=2, + oneof='mode', + message=ReadOnly, + ) + exclude_txn_from_change_streams: bool = proto.Field( + proto.BOOL, + number=5, + ) + isolation_level: IsolationLevel = proto.Field( + proto.ENUM, + number=6, + enum=IsolationLevel, + ) + + +class Transaction(proto.Message): + r"""A transaction. + + Attributes: + id (bytes): + ``id`` may be used to identify the transaction in subsequent + [Read][google.spanner.v1.Spanner.Read], + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], + [Commit][google.spanner.v1.Spanner.Commit], or + [Rollback][google.spanner.v1.Spanner.Rollback] calls. + + Single-use read-only transactions do not have IDs, because + single-use transactions do not support multiple requests. + read_timestamp (google.protobuf.timestamp_pb2.Timestamp): + For snapshot read-only transactions, the read timestamp + chosen for the transaction. Not returned by default: see + [TransactionOptions.ReadOnly.return_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.return_read_timestamp]. + + A timestamp in RFC3339 UTC "Zulu" format, accurate to + nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. + precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): + A precommit token is included in the response of a + BeginTransaction request if the read-write transaction is on + a multiplexed session and a mutation_key was specified in + the + [BeginTransaction][google.spanner.v1.BeginTransactionRequest]. + The precommit token with the highest sequence number from + this transaction attempt should be passed to the + [Commit][google.spanner.v1.Spanner.Commit] request for this + transaction. + """ + + id: bytes = proto.Field( + proto.BYTES, + number=1, + ) + read_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + precommit_token: 'MultiplexedSessionPrecommitToken' = proto.Field( + proto.MESSAGE, + number=3, + message='MultiplexedSessionPrecommitToken', + ) + + +class TransactionSelector(proto.Message): + r"""This message is used to select the transaction in which a + [Read][google.spanner.v1.Spanner.Read] or + [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] call runs. + + See [TransactionOptions][google.spanner.v1.TransactionOptions] for + more information about transactions. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + single_use (google.cloud.spanner_v1.types.TransactionOptions): + Execute the read or SQL query in a temporary + transaction. This is the most efficient way to + execute a transaction that consists of a single + SQL query. + + This field is a member of `oneof`_ ``selector``. + id (bytes): + Execute the read or SQL query in a + previously-started transaction. + + This field is a member of `oneof`_ ``selector``. + begin (google.cloud.spanner_v1.types.TransactionOptions): + Begin a new transaction and execute this read or SQL query + in it. The transaction ID of the new transaction is returned + in + [ResultSetMetadata.transaction][google.spanner.v1.ResultSetMetadata.transaction], + which is a [Transaction][google.spanner.v1.Transaction]. + + This field is a member of `oneof`_ ``selector``. + """ + + single_use: 'TransactionOptions' = proto.Field( + proto.MESSAGE, + number=1, + oneof='selector', + message='TransactionOptions', + ) + id: bytes = proto.Field( + proto.BYTES, + number=2, + oneof='selector', + ) + begin: 'TransactionOptions' = proto.Field( + proto.MESSAGE, + number=3, + oneof='selector', + message='TransactionOptions', + ) + + +class MultiplexedSessionPrecommitToken(proto.Message): + r"""When a read-write transaction is executed on a multiplexed session, + this precommit token is sent back to the client as a part of the + [Transaction][google.spanner.v1.Transaction] message in the + [BeginTransaction][google.spanner.v1.BeginTransactionRequest] + response and also as a part of the + [ResultSet][google.spanner.v1.ResultSet] and + [PartialResultSet][google.spanner.v1.PartialResultSet] responses. + + Attributes: + precommit_token (bytes): + Opaque precommit token. + seq_num (int): + An incrementing seq number is generated on + every precommit token that is returned. Clients + should remember the precommit token with the + highest sequence number from the current + transaction attempt. + """ + + precommit_token: bytes = proto.Field( + proto.BYTES, + number=1, + ) + seq_num: int = proto.Field( + proto.INT32, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py new file mode 100644 index 0000000000..5b6365599e --- /dev/null +++ b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.v1', + manifest={ + 'TypeCode', + 'TypeAnnotationCode', + 'Type', + 'StructType', + }, +) + + +class TypeCode(proto.Enum): + r"""``TypeCode`` is used as part of [Type][google.spanner.v1.Type] to + indicate the type of a Cloud Spanner value. + + Each legal value of a type can be encoded to or decoded from a JSON + value, using the encodings described below. All Cloud Spanner values + can be ``null``, regardless of type; ``null``\ s are always encoded + as a JSON ``null``. + + Values: + TYPE_CODE_UNSPECIFIED (0): + Not specified. + BOOL (1): + Encoded as JSON ``true`` or ``false``. + INT64 (2): + Encoded as ``string``, in decimal format. + FLOAT64 (3): + Encoded as ``number``, or the strings ``"NaN"``, + ``"Infinity"``, or ``"-Infinity"``. + FLOAT32 (15): + Encoded as ``number``, or the strings ``"NaN"``, + ``"Infinity"``, or ``"-Infinity"``. + TIMESTAMP (4): + Encoded as ``string`` in RFC 3339 timestamp format. The time + zone must be present, and must be ``"Z"``. + + If the schema has the column option + ``allow_commit_timestamp=true``, the placeholder string + ``"spanner.commit_timestamp()"`` can be used to instruct the + system to insert the commit timestamp associated with the + transaction commit. + DATE (5): + Encoded as ``string`` in RFC 3339 date format. + STRING (6): + Encoded as ``string``. + BYTES (7): + Encoded as a base64-encoded ``string``, as described in RFC + 4648, section 4. + ARRAY (8): + Encoded as ``list``, where the list elements are represented + according to + [array_element_type][google.spanner.v1.Type.array_element_type]. + STRUCT (9): + Encoded as ``list``, where list element ``i`` is represented + according to + [struct_type.fields[i]][google.spanner.v1.StructType.fields]. + NUMERIC (10): + Encoded as ``string``, in decimal format or scientific + notation format. Decimal format: ``[+-]Digits[.[Digits]]`` + or ``[+-][Digits].Digits`` + + Scientific notation: + ``[+-]Digits[.[Digits]][ExponentIndicator[+-]Digits]`` or + ``[+-][Digits].Digits[ExponentIndicator[+-]Digits]`` + (ExponentIndicator is ``"e"`` or ``"E"``) + JSON (11): + Encoded as a JSON-formatted ``string`` as described in RFC + 7159. The following rules are applied when parsing JSON + input: + + - Whitespace characters are not preserved. + - If a JSON object has duplicate keys, only the first key is + preserved. + - Members of a JSON object are not guaranteed to have their + order preserved. + - JSON array elements will have their order preserved. + PROTO (13): + Encoded as a base64-encoded ``string``, as described in RFC + 4648, section 4. + ENUM (14): + Encoded as ``string``, in decimal format. + INTERVAL (16): + Encoded as ``string``, in ``ISO8601`` duration format - + ``P[n]Y[n]M[n]DT[n]H[n]M[n[.fraction]]S`` where ``n`` is an + integer. For example, ``P1Y2M3DT4H5M6.5S`` represents time + duration of 1 year, 2 months, 3 days, 4 hours, 5 minutes, + and 6.5 seconds. + UUID (17): + Encoded as ``string``, in lower-case hexa-decimal format, as + described in RFC 9562, section 4. 
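+
+    Example (an illustrative sketch; it uses the ``Type`` and
+    ``TypeCode`` names defined in this module). The type of an
+    ``ARRAY<STRING>`` column can be described as::
+
+        # ARRAY types carry their element type alongside the ARRAY code.
+        Type(
+            code=TypeCode.ARRAY,
+            array_element_type=Type(code=TypeCode.STRING),
+        )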
+ """ + TYPE_CODE_UNSPECIFIED = 0 + BOOL = 1 + INT64 = 2 + FLOAT64 = 3 + FLOAT32 = 15 + TIMESTAMP = 4 + DATE = 5 + STRING = 6 + BYTES = 7 + ARRAY = 8 + STRUCT = 9 + NUMERIC = 10 + JSON = 11 + PROTO = 13 + ENUM = 14 + INTERVAL = 16 + UUID = 17 + + +class TypeAnnotationCode(proto.Enum): + r"""``TypeAnnotationCode`` is used as a part of + [Type][google.spanner.v1.Type] to disambiguate SQL types that should + be used for a given Cloud Spanner value. Disambiguation is needed + because the same Cloud Spanner type can be mapped to different SQL + types depending on SQL dialect. TypeAnnotationCode doesn't affect + the way value is serialized. + + Values: + TYPE_ANNOTATION_CODE_UNSPECIFIED (0): + Not specified. + PG_NUMERIC (2): + PostgreSQL compatible NUMERIC type. This annotation needs to + be applied to [Type][google.spanner.v1.Type] instances + having [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] type + code to specify that values of this type should be treated + as PostgreSQL NUMERIC values. Currently this annotation is + always needed for + [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] when a client + interacts with PostgreSQL-enabled Spanner databases. + PG_JSONB (3): + PostgreSQL compatible JSONB type. This annotation needs to + be applied to [Type][google.spanner.v1.Type] instances + having [JSON][google.spanner.v1.TypeCode.JSON] type code to + specify that values of this type should be treated as + PostgreSQL JSONB values. Currently this annotation is always + needed for [JSON][google.spanner.v1.TypeCode.JSON] when a + client interacts with PostgreSQL-enabled Spanner databases. + PG_OID (4): + PostgreSQL compatible OID type. This + annotation can be used by a client interacting + with PostgreSQL-enabled Spanner database to + specify that a value should be treated using the + semantics of the OID type. + """ + TYPE_ANNOTATION_CODE_UNSPECIFIED = 0 + PG_NUMERIC = 2 + PG_JSONB = 3 + PG_OID = 4 + + +class Type(proto.Message): + r"""``Type`` indicates the type of a Cloud Spanner value, as might be + stored in a table cell or returned from an SQL query. + + Attributes: + code (google.cloud.spanner_v1.types.TypeCode): + Required. The [TypeCode][google.spanner.v1.TypeCode] for + this type. + array_element_type (google.cloud.spanner_v1.types.Type): + If [code][google.spanner.v1.Type.code] == + [ARRAY][google.spanner.v1.TypeCode.ARRAY], then + ``array_element_type`` is the type of the array elements. + struct_type (google.cloud.spanner_v1.types.StructType): + If [code][google.spanner.v1.Type.code] == + [STRUCT][google.spanner.v1.TypeCode.STRUCT], then + ``struct_type`` provides type information for the struct's + fields. + type_annotation (google.cloud.spanner_v1.types.TypeAnnotationCode): + The + [TypeAnnotationCode][google.spanner.v1.TypeAnnotationCode] + that disambiguates SQL type that Spanner will use to + represent values of this type during query processing. This + is necessary for some type codes because a single + [TypeCode][google.spanner.v1.TypeCode] can be mapped to + different SQL types depending on the SQL dialect. + [type_annotation][google.spanner.v1.Type.type_annotation] + typically is not needed to process the content of a value + (it doesn't affect serialization) and clients can ignore it + on the read path. 
+ proto_type_fqn (str): + If [code][google.spanner.v1.Type.code] == + [PROTO][google.spanner.v1.TypeCode.PROTO] or + [code][google.spanner.v1.Type.code] == + [ENUM][google.spanner.v1.TypeCode.ENUM], then + ``proto_type_fqn`` is the fully qualified name of the proto + type representing the proto/enum definition. + """ + + code: 'TypeCode' = proto.Field( + proto.ENUM, + number=1, + enum='TypeCode', + ) + array_element_type: 'Type' = proto.Field( + proto.MESSAGE, + number=2, + message='Type', + ) + struct_type: 'StructType' = proto.Field( + proto.MESSAGE, + number=3, + message='StructType', + ) + type_annotation: 'TypeAnnotationCode' = proto.Field( + proto.ENUM, + number=4, + enum='TypeAnnotationCode', + ) + proto_type_fqn: str = proto.Field( + proto.STRING, + number=5, + ) + + +class StructType(proto.Message): + r"""``StructType`` defines the fields of a + [STRUCT][google.spanner.v1.TypeCode.STRUCT] type. + + Attributes: + fields (MutableSequence[google.cloud.spanner_v1.types.StructType.Field]): + The list of fields that make up this struct. Order is + significant, because values of this struct type are + represented as lists, where the order of field values + matches the order of fields in the + [StructType][google.spanner.v1.StructType]. In turn, the + order of fields matches the order of columns in a read + request, or the order of fields in the ``SELECT`` clause of + a query. + """ + + class Field(proto.Message): + r"""Message representing a single field of a struct. + + Attributes: + name (str): + The name of the field. For reads, this is the column name. + For SQL queries, it is the column alias (e.g., ``"Word"`` in + the query ``"SELECT 'hello' AS Word"``), or the column name + (e.g., ``"ColName"`` in the query + ``"SELECT ColName FROM Table"``). Some columns might have an + empty name (e.g., ``"SELECT UPPER(ColName)"``). Note that a + query result can contain multiple fields with the same name. + type_ (google.cloud.spanner_v1.types.Type): + The type of the field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'Type' = proto.Field( + proto.MESSAGE, + number=2, + message='Type', + ) + + fields: MutableSequence[Field] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Field, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/mypy.ini b/owl-bot-staging/spanner/v1/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/spanner/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/spanner/v1/noxfile.py b/owl-bot-staging/spanner/v1/noxfile.py new file mode 100644 index 0000000000..594241417c --- /dev/null +++ b/owl-bot-staging/spanner/v1/noxfile.py @@ -0,0 +1,601 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] + +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-spanner" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. 
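+    # PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION is protobuf's standard backend
+    # selector, so each parametrized run below exercises the pure-Python,
+    # upb, or C++ protobuf runtime.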
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=PREVIEW_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json new file mode 100644 index 0000000000..f7f33c3d29 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json @@ -0,0 +1,2579 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.spanner.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-spanner", + "version": "0.0.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_create_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BatchCreateSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "session_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", + "shortName": "batch_create_sessions" + }, + "description": "Sample for BatchCreateSessions", + "file": "spanner_v1_generated_spanner_batch_create_sessions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_create_sessions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.batch_create_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + 
}, + "shortName": "BatchCreateSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "session_count", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", + "shortName": "batch_create_sessions" + }, + "description": "Sample for BatchCreateSessions", + "file": "spanner_v1_generated_spanner_batch_create_sessions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_create_sessions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_write", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchWrite", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BatchWrite" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BatchWriteRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "mutation_groups", + "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", + "shortName": "batch_write" + }, + "description": "Sample for BatchWrite", + "file": "spanner_v1_generated_spanner_batch_write_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchWrite_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_write_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.batch_write", + "method": { + "fullName": "google.spanner.v1.Spanner.BatchWrite", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BatchWrite" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_v1.types.BatchWriteRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "mutation_groups", + "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", + "shortName": "batch_write" + }, + "description": "Sample for BatchWrite", + "file": "spanner_v1_generated_spanner_batch_write_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BatchWrite_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_batch_write_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.begin_transaction", + "method": { + "fullName": "google.spanner.v1.Spanner.BeginTransaction", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BeginTransaction" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "options", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Transaction", + "shortName": "begin_transaction" + }, + "description": "Sample for BeginTransaction", + "file": "spanner_v1_generated_spanner_begin_transaction_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_begin_transaction_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.begin_transaction", + "method": { + "fullName": "google.spanner.v1.Spanner.BeginTransaction", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "BeginTransaction" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" + }, + { + "name": "session", + "type": "str" + }, 
+ { + "name": "options", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Transaction", + "shortName": "begin_transaction" + }, + "description": "Sample for BeginTransaction", + "file": "spanner_v1_generated_spanner_begin_transaction_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_begin_transaction_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.commit", + "method": { + "fullName": "google.spanner.v1.Spanner.Commit", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Commit" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CommitRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "mutations", + "type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" + }, + { + "name": "single_use_transaction", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.CommitResponse", + "shortName": "commit" + }, + "description": "Sample for Commit", + "file": "spanner_v1_generated_spanner_commit_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Commit_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_commit_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.commit", + "method": { + "fullName": "google.spanner.v1.Spanner.Commit", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Commit" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CommitRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "mutations", + 
"type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" + }, + { + "name": "single_use_transaction", + "type": "google.cloud.spanner_v1.types.TransactionOptions" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.CommitResponse", + "shortName": "commit" + }, + "description": "Sample for Commit", + "file": "spanner_v1_generated_spanner_commit_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Commit_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_commit_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.create_session", + "method": { + "fullName": "google.spanner.v1.Spanner.CreateSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "CreateSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CreateSessionRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Session", + "shortName": "create_session" + }, + "description": "Sample for CreateSession", + "file": "spanner_v1_generated_spanner_create_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_CreateSession_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_create_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.create_session", + "method": { + "fullName": "google.spanner.v1.Spanner.CreateSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "CreateSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.CreateSessionRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.spanner_v1.types.Session", + "shortName": "create_session" + }, + "description": "Sample for CreateSession", + "file": "spanner_v1_generated_spanner_create_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_CreateSession_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_create_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.delete_session", + "method": { + "fullName": "google.spanner.v1.Spanner.DeleteSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "DeleteSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_session" + }, + "description": "Sample for DeleteSession", + "file": "spanner_v1_generated_spanner_delete_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_DeleteSession_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_delete_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.delete_session", + "method": { + "fullName": "google.spanner.v1.Spanner.DeleteSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "DeleteSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_session" + }, + "description": "Sample for DeleteSession", + "file": "spanner_v1_generated_spanner_delete_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_DeleteSession_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_delete_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_batch_dml", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteBatchDml" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", + "shortName": "execute_batch_dml" + }, + "description": "Sample for ExecuteBatchDml", + "file": "spanner_v1_generated_spanner_execute_batch_dml_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_batch_dml_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.execute_batch_dml", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteBatchDml" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", + "shortName": "execute_batch_dml" + }, + "description": "Sample for ExecuteBatchDml", + "file": "spanner_v1_generated_spanner_execute_batch_dml_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_batch_dml_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + 
"shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "spanner_v1_generated_spanner_execute_sql_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_sql_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.execute_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "execute_sql" + }, + "description": "Sample for ExecuteSql", + "file": "spanner_v1_generated_spanner_execute_sql_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_sql_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_streaming_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteStreamingSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "execute_streaming_sql" + }, + "description": "Sample for ExecuteStreamingSql", + "file": "spanner_v1_generated_spanner_execute_streaming_sql_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_streaming_sql_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.execute_streaming_sql", + "method": { + "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ExecuteStreamingSql" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "execute_streaming_sql" + }, + "description": "Sample for ExecuteStreamingSql", + "file": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.get_session", + "method": { + "fullName": "google.spanner.v1.Spanner.GetSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "GetSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.GetSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Session", + "shortName": "get_session" + }, + "description": "Sample for GetSession", + "file": 
"spanner_v1_generated_spanner_get_session_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_GetSession_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_get_session_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.get_session", + "method": { + "fullName": "google.spanner.v1.Spanner.GetSession", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "GetSession" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.GetSessionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.Session", + "shortName": "get_session" + }, + "description": "Sample for GetSession", + "file": "spanner_v1_generated_spanner_get_session_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_GetSession_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_get_session_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.list_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.ListSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ListSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager", + "shortName": "list_sessions" + }, + "description": "Sample for ListSessions", + "file": "spanner_v1_generated_spanner_list_sessions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ListSessions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_list_sessions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.list_sessions", + "method": { + "fullName": "google.spanner.v1.Spanner.ListSessions", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "ListSessions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ListSessionsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager", + "shortName": "list_sessions" + }, + "description": "Sample for ListSessions", + "file": "spanner_v1_generated_spanner_list_sessions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_ListSessions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_list_sessions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_query", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionQuery", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_query" + }, + "description": "Sample for PartitionQuery", + "file": "spanner_v1_generated_spanner_partition_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.partition_query", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionQuery", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_query" + }, + "description": "Sample for PartitionQuery", + "file": "spanner_v1_generated_spanner_partition_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_read", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionRead" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.PartitionReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_read" + }, + "description": "Sample for PartitionRead", + "file": "spanner_v1_generated_spanner_partition_read_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionRead_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_read_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.partition_read", + "method": { + "fullName": "google.spanner.v1.Spanner.PartitionRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "PartitionRead" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_v1.types.PartitionReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.PartitionResponse", + "shortName": "partition_read" + }, + "description": "Sample for PartitionRead", + "file": "spanner_v1_generated_spanner_partition_read_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_PartitionRead_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_partition_read_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.read", + "method": { + "fullName": "google.spanner.v1.Spanner.Read", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Read" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "read" + }, + "description": "Sample for Read", + "file": "spanner_v1_generated_spanner_read_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Read_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_read_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.read", + "method": { + "fullName": "google.spanner.v1.Spanner.Read", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Read" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_v1.types.ResultSet", + "shortName": "read" + }, + "description": "Sample for Read", + "file": "spanner_v1_generated_spanner_read_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Read_sync", + "segments": [ + { + 
"end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_read_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.rollback", + "method": { + "fullName": "google.spanner.v1.Spanner.Rollback", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Rollback" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.RollbackRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "rollback" + }, + "description": "Sample for Rollback", + "file": "spanner_v1_generated_spanner_rollback_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Rollback_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_rollback_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.rollback", + "method": { + "fullName": "google.spanner.v1.Spanner.Rollback", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "Rollback" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.RollbackRequest" + }, + { + "name": "session", + "type": "str" + }, + { + "name": "transaction_id", + "type": "bytes" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "rollback" + }, + "description": "Sample for Rollback", + "file": "spanner_v1_generated_spanner_rollback_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_Rollback_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_rollback_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.spanner_v1.SpannerAsyncClient", + "shortName": "SpannerAsyncClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.streaming_read", + "method": { + "fullName": "google.spanner.v1.Spanner.StreamingRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "StreamingRead" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "streaming_read" + }, + "description": "Sample for StreamingRead", + "file": "spanner_v1_generated_spanner_streaming_read_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_StreamingRead_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_streaming_read_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_v1.SpannerClient", + "shortName": "SpannerClient" + }, + "fullName": "google.cloud.spanner_v1.SpannerClient.streaming_read", + "method": { + "fullName": "google.spanner.v1.Spanner.StreamingRead", + "service": { + "fullName": "google.spanner.v1.Spanner", + "shortName": "Spanner" + }, + "shortName": "StreamingRead" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_v1.types.ReadRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", + "shortName": "streaming_read" + }, + "description": "Sample for StreamingRead", + "file": "spanner_v1_generated_spanner_streaming_read_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_Spanner_StreamingRead_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_spanner_streaming_read_sync.py" + } + ] +} diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py new file mode 100644 index 0000000000..49e64b4ab8 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchCreateSessions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchCreateSessions_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_batch_create_sessions():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BatchCreateSessionsRequest(
+        database="database_value",
+        session_count=1420,
+    )
+
+    # Make the request
+    response = await client.batch_create_sessions(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BatchCreateSessions_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py
new file mode 100644
index 0000000000..ade1da3661
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchCreateSessions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchCreateSessions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_batch_create_sessions():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BatchCreateSessionsRequest(
+        database="database_value",
+        session_count=1420,
+    )
+
+    # Make the request
+    response = client.batch_create_sessions(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BatchCreateSessions_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py
new file mode 100644
index 0000000000..d1565657e8
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchWrite
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchWrite_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_batch_write():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    mutation = spanner_v1.Mutation()
+    mutation.insert.table = "table_value"
+
+    request = spanner_v1.BatchWriteRequest(
+        session="session_value",
+        mutation_groups=[spanner_v1.BatchWriteRequest.MutationGroup(mutations=[mutation])],
+    )
+
+    # Make the request
+    stream = await client.batch_write(request=request)
+
+    # Handle the response
+    async for response in stream:
+        print(response)
+
+# [END spanner_v1_generated_Spanner_BatchWrite_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
new file mode 100644
index 0000000000..9b6621def9
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BatchWrite
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BatchWrite_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_batch_write():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    mutation = spanner_v1.Mutation()
+    mutation.insert.table = "table_value"
+
+    request = spanner_v1.BatchWriteRequest(
+        session="session_value",
+        mutation_groups=[spanner_v1.BatchWriteRequest.MutationGroup(mutations=[mutation])],
+    )
+
+    # Make the request
+    stream = client.batch_write(request=request)
+
+    # Handle the response
+    for response in stream:
+        print(response)
+
+# [END spanner_v1_generated_Spanner_BatchWrite_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
new file mode 100644
index 0000000000..efdd161715
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BeginTransaction
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BeginTransaction_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_begin_transaction():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BeginTransactionRequest(
+        session="session_value",
+    )
+
+    # Make the request
+    response = await client.begin_transaction(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BeginTransaction_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
new file mode 100644
index 0000000000..764dab8aa2
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for BeginTransaction
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_BeginTransaction_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_begin_transaction():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.BeginTransactionRequest(
+        session="session_value",
+    )
+
+    # Make the request
+    response = client.begin_transaction(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_BeginTransaction_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
new file mode 100644
index 0000000000..f61c297d38
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for Commit
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_Commit_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_commit():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.CommitRequest(
+        transaction_id=b'transaction_id_blob',
+        session="session_value",
+    )
+
+    # Make the request
+    response = await client.commit(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_Spanner_Commit_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
new file mode 100644
index 0000000000..a945bd2234
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for Commit
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_Commit_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_commit(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.CommitRequest( + transaction_id=b'transaction_id_blob', + session="session_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_Commit_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py new file mode 100644 index 0000000000..8cddc00c66 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_CreateSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_create_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.CreateSessionRequest( + database="database_value", + ) + + # Make the request + response = await client.create_session(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_CreateSession_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py new file mode 100644 index 0000000000..b9de2d34e0 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_CreateSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_create_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.CreateSessionRequest( + database="database_value", + ) + + # Make the request + response = client.create_session(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_CreateSession_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py new file mode 100644 index 0000000000..9fed1ddca6 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_DeleteSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_delete_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + await client.delete_session(request=request) + + +# [END spanner_v1_generated_Spanner_DeleteSession_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py new file mode 100644 index 0000000000..1f2a17e2d1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_DeleteSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_delete_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.DeleteSessionRequest( + name="name_value", + ) + + # Make the request + client.delete_session(request=request) + + +# [END spanner_v1_generated_Spanner_DeleteSession_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py new file mode 100644 index 0000000000..8313fd66a0 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteBatchDml +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteBatchDml_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_execute_batch_dml(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + statements = spanner_v1.Statement() + statements.sql = "sql_value" + + request = spanner_v1.ExecuteBatchDmlRequest( + session="session_value", + statements=statements, + seqno=550, + ) + + # Make the request + response = await client.execute_batch_dml(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteBatchDml_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py new file mode 100644 index 0000000000..dd4696b6b2 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteBatchDml +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteBatchDml_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_execute_batch_dml(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + statements = spanner_v1.Statement() + statements.sql = "sql_value" + + request = spanner_v1.ExecuteBatchDmlRequest( + session="session_value", + statements=statements, + seqno=550, + ) + + # Make the request + response = client.execute_batch_dml(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteBatchDml_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py new file mode 100644 index 0000000000..a12b20f3e9 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteSql_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_execute_sql(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = await client.execute_sql(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteSql_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py new file mode 100644 index 0000000000..761d0ca251 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteSql_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_execute_sql(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = client.execute_sql(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteSql_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py new file mode 100644 index 0000000000..86b8eb910e --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteStreamingSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_execute_streaming_sql(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + stream = await client.execute_streaming_sql(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py new file mode 100644 index 0000000000..dc7dba43b8 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExecuteStreamingSql +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_execute_streaming_sql(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ExecuteSqlRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + stream = client.execute_streaming_sql(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py new file mode 100644 index 0000000000..d2e50f9891 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_GetSession_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_get_session(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_session(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_GetSession_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py new file mode 100644 index 0000000000..36d6436b04 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSession +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_GetSession_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_get_session(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.GetSessionRequest( + name="name_value", + ) + + # Make the request + response = client.get_session(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_GetSession_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py new file mode 100644 index 0000000000..95aa4bf818 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_ListSessions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+async def sample_list_sessions():
+    # Create a client
+    client = spanner_v1.SpannerAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.ListSessionsRequest(
+        database="database_value",
+    )
+
+    # Make the request
+    # Note: the async client's paginated method is a coroutine and must be
+    # awaited before iterating over the resulting async pager.
+    page_result = await client.list_sessions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_Spanner_ListSessions_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py
new file mode 100644
index 0000000000..a9533fed0d
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSessions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner
+
+
+# [START spanner_v1_generated_Spanner_ListSessions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_v1
+
+
+def sample_list_sessions():
+    # Create a client
+    client = spanner_v1.SpannerClient()
+
+    # Initialize request argument(s)
+    request = spanner_v1.ListSessionsRequest(
+        database="database_value",
+    )
+
+    # Make the request
+    page_result = client.list_sessions(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_Spanner_ListSessions_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py
new file mode 100644
index 0000000000..200fb2f6a2
--- /dev/null
+++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_partition_query(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionQueryRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = await client.partition_query(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionQuery_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py new file mode 100644 index 0000000000..d486a3590c --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_partition_query(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionQueryRequest( + session="session_value", + sql="sql_value", + ) + + # Make the request + response = client.partition_query(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionQuery_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py new file mode 100644 index 0000000000..99055ade8b --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionRead_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = await client.partition_read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionRead_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py new file mode 100644 index 0000000000..0ca01ac423 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PartitionRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_PartitionRead_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_partition_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.PartitionReadRequest( + session="session_value", + table="table_value", + ) + + # Make the request + response = client.partition_read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_PartitionRead_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py new file mode 100644 index 0000000000..e555865245 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Read +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Read_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + response = await client.read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_Read_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py new file mode 100644 index 0000000000..8f9ee621f3 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Read +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Read_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + response = client.read(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_Spanner_Read_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py new file mode 100644 index 0000000000..f99a1b8dd8 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rollback +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Rollback_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_rollback(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + await client.rollback(request=request) + + +# [END spanner_v1_generated_Spanner_Rollback_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py new file mode 100644 index 0000000000..00b23b21fc --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Rollback +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_Rollback_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_rollback(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.RollbackRequest( + session="session_value", + transaction_id=b'transaction_id_blob', + ) + + # Make the request + client.rollback(request=request) + + +# [END spanner_v1_generated_Spanner_Rollback_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py new file mode 100644 index 0000000000..f79b9a96a1 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_StreamingRead_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +async def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerAsyncClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = await client.streaming_read(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_StreamingRead_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py new file mode 100644 index 0000000000..f81ed34b33 --- /dev/null +++ b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingRead +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner + + +# [START spanner_v1_generated_Spanner_StreamingRead_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_v1 + + +def sample_streaming_read(): + # Create a client + client = spanner_v1.SpannerClient() + + # Initialize request argument(s) + request = spanner_v1.ReadRequest( + session="session_value", + table="table_value", + columns=['columns_value1', 'columns_value2'], + ) + + # Make the request + stream = client.streaming_read(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END spanner_v1_generated_Spanner_StreamingRead_sync] diff --git a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py b/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py new file mode 100644 index 0000000000..c7f41be11e --- /dev/null +++ b/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py @@ -0,0 +1,191 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
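+#
+# This script rewrites positional Spanner client calls into the
+# request-object form. An illustrative before/after (the literal values
+# are placeholders, not real session or SQL strings):
+#
+#     client.execute_sql("session_value", "SELECT 1")
+#
+# becomes
+#
+#     client.execute_sql(request={'session': "session_value", 'sql': "SELECT 1"})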
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class spannerCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_sessions': ('database', 'session_count', 'session_template', ), + 'batch_write': ('session', 'mutation_groups', 'request_options', 'exclude_txn_from_change_streams', ), + 'begin_transaction': ('session', 'options', 'request_options', 'mutation_key', ), + 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'max_commit_delay', 'request_options', 'precommit_token', ), + 'create_session': ('database', 'session', ), + 'delete_session': ('name', ), + 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', 'last_statements', ), + 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), + 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), + 'get_session': ('name', ), + 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), + 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), + 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), + 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), + 'rollback': ('session', 'transaction_id', ), + 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
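+            # (A `request=` keyword argument only appears once a call is
+            # already in the request-object form, so transforming it again
+            # could wrap the request dictionary a second time.)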
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=spannerCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the spanner client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner/v1/setup.py b/owl-bot-staging/spanner/v1/setup.py new file mode 100644 index 0000000000..41d728098c --- /dev/null +++ b/owl-bot-staging/spanner/v1/setup.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-spanner' + + +description = "Google Cloud Spanner API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/spanner/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + 
author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt new file mode 100644 index 0000000000..1e93c60e50 --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt @@ -0,0 +1,12 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt new file mode 100644 index 0000000000..1e93c60e50 --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt @@ -0,0 +1,12 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. 
+# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000000..5d29dea386 --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt @@ -0,0 +1,12 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000000..93e6826f2a --- /dev/null +++ b/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf diff --git a/owl-bot-staging/spanner/v1/tests/__init__.py b/owl-bot-staging/spanner/v1/tests/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py new file mode 100644 index 0000000000..b90788ab0f --- /dev/null +++ b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py @@ -0,0 +1,11446 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
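The unit tests that follow share one pattern: build a client with anonymous credentials, patch the transport-level callable so nothing touches the network, then assert on the request that reaches the stub and on the faked response. A minimal self-contained sketch of that pattern (the session path is illustrative):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud import spanner_v1


    def test_get_session_sketch():
        client = spanner_v1.SpannerClient(credentials=ga_credentials.AnonymousCredentials())
        # Patch the gRPC stub method and fake its return value.
        with mock.patch.object(type(client.transport.get_session), '__call__') as call:
            call.return_value = spanner_v1.Session(name='name_value')
            response = client.get_session(name='projects/p/instances/i/databases/d/sessions/s')
        assert isinstance(response, spanner_v1.Session)
        assert response.name == 'name_value'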
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient +from google.cloud.spanner_v1.services.spanner import SpannerClient +from google.cloud.spanner_v1.services.spanner import pagers +from google.cloud.spanner_v1.services.spanner import transports +from google.cloud.spanner_v1.types import commit_response +from google.cloud.spanner_v1.types import keys +from google.cloud.spanner_v1.types import mutation +from google.cloud.spanner_v1.types import result_set +from google.cloud.spanner_v1.types import spanner +from google.cloud.spanner_v1.types import transaction +from google.cloud.spanner_v1.types import type as gs_type +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SpannerClient._get_default_mtls_endpoint(None) is None + assert SpannerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SpannerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert SpannerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SpannerClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SpannerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + SpannerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SpannerClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SpannerClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SpannerClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SpannerClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SpannerClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SpannerClient._get_client_cert_source(None, False) is None + assert SpannerClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert SpannerClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert SpannerClient._get_client_cert_source(None, True) is mock_default_cert_source + assert 
SpannerClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SpannerClient._DEFAULT_UNIVERSE + default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert SpannerClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == SpannerClient.DEFAULT_MTLS_ENDPOINT + assert SpannerClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert SpannerClient._get_api_endpoint(None, None, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT + assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT + assert SpannerClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert SpannerClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + SpannerClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert SpannerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert SpannerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert SpannerClient._get_universe_domain(None, None) == SpannerClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + SpannerClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
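The endpoint-resolution helpers tested above reduce to one user-visible rule: an explicit `api_endpoint` in `ClientOptions` always wins; otherwise the endpoint is derived from the universe domain and the mTLS environment variables. A short sketch of the override path (the endpoint value is illustrative):

    from google.api_core import client_options
    from google.auth import credentials as ga_credentials
    from google.cloud import spanner_v1

    options = client_options.ClientOptions(api_endpoint='us-central1-spanner.googleapis.com')
    client = spanner_v1.SpannerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
    assert client.api_endpoint == 'us-central1-spanner.googleapis.com'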
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SpannerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SpannerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (SpannerClient, "grpc"), + (SpannerAsyncClient, "grpc_asyncio"), + (SpannerClient, "rest"), +]) +def test_spanner_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SpannerGrpcTransport, "grpc"), + (transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SpannerRestTransport, "rest"), +]) +def test_spanner_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SpannerClient, "grpc"), + (SpannerAsyncClient, "grpc_asyncio"), + (SpannerClient, "rest"), +]) +def test_spanner_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +def test_spanner_client_get_transport_class(): + transport = SpannerClient.get_transport_class() + available_transports = [ + transports.SpannerGrpcTransport, + transports.SpannerRestTransport, + ] + assert transport in available_transports + + transport = SpannerClient.get_transport_class("grpc") + assert transport == transports.SpannerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), + (SpannerClient, transports.SpannerRestTransport, "rest"), +]) +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +def test_spanner_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
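+    # ("always" must force the default mTLS endpoint even though no client
+    # certificate source is configured here.)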
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc", "true"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (SpannerClient, transports.SpannerGrpcTransport, "grpc", "false"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (SpannerClient, transports.SpannerRestTransport, "rest", "true"), + (SpannerClient, transports.SpannerRestTransport, "rest", "false"), +]) +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def 
test_spanner_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
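+    # With neither an explicit nor an ADC-discovered cert source available,
+    # the plain default endpoint is expected regardless of
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE.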
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + SpannerClient, SpannerAsyncClient +]) +@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) +def test_spanner_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + SpannerClient, SpannerAsyncClient +]) +@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) +@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) +def test_spanner_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SpannerClient._DEFAULT_UNIVERSE + default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc"), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), + (SpannerClient, transports.SpannerRestTransport, "rest"), +]) +def test_spanner_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (SpannerClient, transports.SpannerRestTransport, "rest", None), +]) +def test_spanner_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_spanner_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = SpannerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_spanner_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
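+    # The three patches below keep this hermetic: file loading and ADC are
+    # both stubbed, and create_channel is intercepted so the assertion can
+    # check that the file-based credentials (not the ADC ones) reach it.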
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.data', +), + scopes=None, + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + spanner.CreateSessionRequest, + dict, +]) +def test_create_session(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + ) + response = client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.CreateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +def test_create_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.CreateSessionRequest( + database='database_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CreateSessionRequest( + database='database_value', + ) + +def test_create_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_session] = mock_rpc + request = {} + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_session in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_session] = mock_rpc + + request = {} + await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_session_async(transport: str = 'grpc_asyncio', request_type=spanner.CreateSessionRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + )) + response = await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.CreateSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +@pytest.mark.asyncio +async def test_create_session_async_from_dict(): + await test_create_session_async(request_type=dict) + +def test_create_session_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CreateSessionRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = spanner.Session() + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_session_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CreateSessionRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) + await client.create_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_create_session_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.Session() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_session( + database='database_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + + +def test_create_session_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.create_session(
+            spanner.CreateSessionRequest(),
+            database='database_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_session_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_session),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_session(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_session_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_session(
+            spanner.CreateSessionRequest(),
+            database='database_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.BatchCreateSessionsRequest,
+    dict,
+])
+def test_batch_create_sessions(request_type, transport: str = 'grpc'):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_create_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner.BatchCreateSessionsResponse(
+        )
+        response = client.batch_create_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner.BatchCreateSessionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.BatchCreateSessionsResponse)
+
+
+def test_batch_create_sessions_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner.BatchCreateSessionsRequest(
+        database='database_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_create_sessions),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
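+        # (If BatchCreateSessionsRequest declared an AIP-4235 auto-populated
+        # request_id field, the client would fill it with a UUID4; here we
+        # only verify that explicitly set fields are echoed back unchanged.)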
+        client.batch_create_sessions(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == spanner.BatchCreateSessionsRequest(
+            database='database_value',
+        )
+
+def test_batch_create_sessions_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = SpannerClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.batch_create_sessions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc
+        request = {}
+        client.batch_create_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.batch_create_sessions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_batch_create_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = SpannerAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.batch_create_sessions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.batch_create_sessions] = mock_rpc
+
+        request = {}
+        await client.batch_create_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.batch_create_sessions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_batch_create_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchCreateSessionsRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_create_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse(
+        ))
+        response = await client.batch_create_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
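+        # (A truthy len(call.mock_calls) confirms the stub was awaited at
+        # least once; the sync variant asserts an exact count of 1.)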
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.BatchCreateSessionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.BatchCreateSessionsResponse) + + +@pytest.mark.asyncio +async def test_batch_create_sessions_async_from_dict(): + await test_batch_create_sessions_async(request_type=dict) + +def test_batch_create_sessions_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchCreateSessionsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = spanner.BatchCreateSessionsResponse() + client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_create_sessions_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchCreateSessionsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) + await client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_batch_create_sessions_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.BatchCreateSessionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_create_sessions( + database='database_value', + session_count=1420, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].session_count + mock_val = 1420 + assert arg == mock_val + + +def test_batch_create_sessions_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.batch_create_sessions(
+            spanner.BatchCreateSessionsRequest(),
+            database='database_value',
+            session_count=1420,
+        )
+
+@pytest.mark.asyncio
+async def test_batch_create_sessions_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_create_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.batch_create_sessions(
+            database='database_value',
+            session_count=1420,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+        arg = args[0].session_count
+        mock_val = 1420
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_batch_create_sessions_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.batch_create_sessions(
+            spanner.BatchCreateSessionsRequest(),
+            database='database_value',
+            session_count=1420,
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.GetSessionRequest,
+    dict,
+])
+def test_get_session(request_type, transport: str = 'grpc'):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_session),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner.Session(
+            name='name_value',
+            creator_role='creator_role_value',
+            multiplexed=True,
+        )
+        response = client.get_session(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner.GetSessionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.Session)
+    assert response.name == 'name_value'
+    assert response.creator_role == 'creator_role_value'
+    assert response.multiplexed is True
+
+
+def test_get_session_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+ request = spanner.GetSessionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.GetSessionRequest( + name='name_value', + ) + +def test_get_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_session] = mock_rpc + request = {} + client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_session in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_session] = mock_rpc + + request = {} + await client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_session_async(transport: str = 'grpc_asyncio', request_type=spanner.GetSessionRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. 
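+        # (For the async client the designated value must be awaitable, hence
+        # the grpc_helpers_async.FakeUnaryUnaryCall wrapper below.)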
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session(
+            name='name_value',
+            creator_role='creator_role_value',
+            multiplexed=True,
+        ))
+        response = await client.get_session(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.GetSessionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.Session)
+    assert response.name == 'name_value'
+    assert response.creator_role == 'creator_role_value'
+    assert response.multiplexed is True
+
+
+@pytest.mark.asyncio
+async def test_get_session_async_from_dict():
+    await test_get_session_async(request_type=dict)
+
+def test_get_session_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.GetSessionRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_session),
+            '__call__') as call:
+        call.return_value = spanner.Session()
+        client.get_session(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_session_field_headers_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.GetSessionRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_session),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
+        await client.get_session(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_session_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_session),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner.Session()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_session(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
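+        # (The flattened keyword arguments should have been copied into the
+        # corresponding request fields before the RPC was issued.)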
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_session_flattened_error():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_session(
+            spanner.GetSessionRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_session_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_session),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_session(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_session_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_session(
+            spanner.GetSessionRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ListSessionsRequest,
+    dict,
+])
+def test_list_sessions(request_type, transport: str = 'grpc'):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner.ListSessionsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner.ListSessionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSessionsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_sessions_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+ request = spanner.ListSessionsRequest( + database='database_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_sessions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ListSessionsRequest( + database='database_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_sessions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_sessions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc + + request = {} + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.ListSessionsRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
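+    # (list_sessions wraps the RPC in a pager; the mocked response below acts
+    # as the first page and exposes its next_page_token on the pager.)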
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.ListSessionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListSessionsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_async_from_dict():
+    await test_list_sessions_async(request_type=dict)
+
+def test_list_sessions_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.ListSessionsRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        call.return_value = spanner.ListSessionsResponse()
+        client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_sessions_field_headers_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.ListSessionsRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
+        await client.list_sessions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+def test_list_sessions_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner.ListSessionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_sessions(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+
+def test_list_sessions_flattened_error():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_sessions(
+            spanner.ListSessionsRequest(),
+            database='database_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_sessions(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_sessions_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_sessions(
+            spanner.ListSessionsRequest(),
+            database='database_value',
+        )
+
+
+def test_list_sessions_pager(transport_name: str = "grpc"):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner.ListSessionsResponse(
+                sessions=[
+                    spanner.Session(),
+                    spanner.Session(),
+                    spanner.Session(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner.ListSessionsResponse(
+                sessions=[],
+                next_page_token='def',
+            ),
+            spanner.ListSessionsResponse(
+                sessions=[
+                    spanner.Session(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner.ListSessionsResponse(
+                sessions=[
+                    spanner.Session(),
+                    spanner.Session(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('database', ''),
+            )),
+        )
+        pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner.Session)
+                   for i in results)
+
+
+def test_list_sessions_pages(transport_name: str = "grpc"):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        # Set the response to a series of pages.
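+        # (side_effect yields one response per call; the trailing RuntimeError
+        # is a sentinel that fails the test if the pager requests more pages
+        # than were provided.)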
+ call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sessions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_sessions_async_pager(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sessions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner.Session) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_sessions_async_pages(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sessions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner.DeleteSessionRequest, + dict, +]) +def test_delete_session(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
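+    # (DeleteSession maps to google.protobuf.Empty, which the client surfaces
+    # as None, hence the None return value below.)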
+ with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.DeleteSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_session_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.DeleteSessionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_session(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.DeleteSessionRequest( + name='name_value', + ) + +def test_delete_session_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc + request = {} + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. 
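+        # (Both invocations below should land on the mock installed in
+        # _wrapped_methods; wrapper_fn staying at zero proves no new wrapper
+        # is built per call.)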
+ assert mock_rpc.call_count == 1 + + client.delete_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_session in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_session] = mock_rpc + + request = {} + await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_session_async(transport: str = 'grpc_asyncio', request_type=spanner.DeleteSessionRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.DeleteSessionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_session_async_from_dict(): + await test_delete_session_async(request_type=dict) + +def test_delete_session_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.DeleteSessionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value = None + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
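+    # (x-goog-request-params carries routing parameters derived from the
+    # resource name, following the GAPIC routing-headers convention.)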
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_session_field_headers_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.DeleteSessionRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_session),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_session(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_session_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_session),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_session(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_session_flattened_error():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_session(
+            spanner.DeleteSessionRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_session_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_session),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_session(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_session_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_session( + spanner.DeleteSessionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteSqlRequest, + dict, +]) +def test_execute_sql(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = result_set.ResultSet( + ) + response = client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteSqlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +def test_execute_sql_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.execute_sql(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + +def test_execute_sql_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc + request = {} + client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.execute_sql(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_execute_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = SpannerAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.execute_sql in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.execute_sql] = mock_rpc
+
+        request = {}
+        await client.execute_sql(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.execute_sql(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_execute_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_sql),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
+        ))
+        response = await client.execute_sql(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.ExecuteSqlRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.asyncio
+async def test_execute_sql_async_from_dict():
+    await test_execute_sql_async(request_type=dict)
+
+def test_execute_sql_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.ExecuteSqlRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_sql),
+            '__call__') as call:
+        call.return_value = result_set.ResultSet()
+        client.execute_sql(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_execute_sql_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) + await client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteSqlRequest, + dict, +]) +def test_execute_streaming_sql(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([result_set.PartialResultSet()]) + response = client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteSqlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, result_set.PartialResultSet) + + +def test_execute_streaming_sql_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.execute_streaming_sql(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteSqlRequest( + session='session_value', + sql='sql_value', + ) + +def test_execute_streaming_sql_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_streaming_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc + request = {} + client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.execute_streaming_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.execute_streaming_sql in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.execute_streaming_sql] = mock_rpc + + request = {} + await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.execute_streaming_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. 
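+        # (Server-streaming RPCs are faked with a UnaryStreamCall mock whose
+        # read() coroutine returns one PartialResultSet per await.)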
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + response = await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteSqlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, result_set.PartialResultSet) + + +@pytest.mark.asyncio +async def test_execute_streaming_sql_async_from_dict(): + await test_execute_streaming_sql_async(request_type=dict) + +def test_execute_streaming_sql_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_execute_streaming_sql_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ExecuteSqlRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteBatchDmlRequest, + dict, +]) +def test_execute_batch_dml(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.ExecuteBatchDmlResponse( + ) + response = client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ExecuteBatchDmlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.ExecuteBatchDmlResponse) + + +def test_execute_batch_dml_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ExecuteBatchDmlRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.execute_batch_dml(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ExecuteBatchDmlRequest( + session='session_value', + ) + +def test_execute_batch_dml_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_batch_dml in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc + request = {} + client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.execute_batch_dml(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_execute_batch_dml_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.execute_batch_dml in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.execute_batch_dml] = mock_rpc + + request = {} + await client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. 
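+        # The call above dispatches straight through the cached entry in
+        # ``_wrapped_methods`` to our mock, so ``wrap_method`` is not invoked
+        # again (verified via ``wrapper_fn`` below).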
+        assert mock_rpc.call_count == 1
+
+        await client.execute_batch_dml(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_execute_batch_dml_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteBatchDmlRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_batch_dml),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse(
+        ))
+        response = await client.execute_batch_dml(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.ExecuteBatchDmlRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.ExecuteBatchDmlResponse)
+
+
+@pytest.mark.asyncio
+async def test_execute_batch_dml_async_from_dict():
+    await test_execute_batch_dml_async(request_type=dict)
+
+def test_execute_batch_dml_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.ExecuteBatchDmlRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_batch_dml),
+            '__call__') as call:
+        call.return_value = spanner.ExecuteBatchDmlResponse()
+        client.execute_batch_dml(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'session=session_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_execute_batch_dml_field_headers_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.ExecuteBatchDmlRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_batch_dml),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse())
+        await client.execute_batch_dml(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
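+    # The routing header travels in the call metadata as a
+    # ('x-goog-request-params', '<field>=<value>') tuple; Spanner routes the
+    # request on the session name.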
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ReadRequest, + dict, +]) +def test_read(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = result_set.ResultSet( + ) + response = client.read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, result_set.ResultSet) + + +def test_read_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.read(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + +def test_read_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.read] = mock_rpc + request = {} + client.read(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.read(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = SpannerAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.read in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.read] = mock_rpc
+
+        request = {}
+        await client.read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.read(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.read),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
+        ))
+        response = await client.read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.ReadRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.asyncio
+async def test_read_async_from_dict():
+    await test_read_async(request_type=dict)
+
+def test_read_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.ReadRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.read),
+            '__call__') as call:
+        call.return_value = result_set.ResultSet()
+        client.read(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_read_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) + await client.read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.ReadRequest, + dict, +]) +def test_streaming_read(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([result_set.PartialResultSet()]) + response = client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.ReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, result_set.PartialResultSet) + + +def test_streaming_read_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.streaming_read(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.ReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + +def test_streaming_read_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.streaming_read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc + request = {} + client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.streaming_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_streaming_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.streaming_read in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.streaming_read] = mock_rpc + + request = {} + await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.streaming_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_streaming_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + response = await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.ReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, result_set.PartialResultSet) + + +@pytest.mark.asyncio +async def test_streaming_read_async_from_dict(): + await test_streaming_read_async(request_type=dict) + +def test_streaming_read_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_streaming_read_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.ReadRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.BeginTransactionRequest, + dict, +]) +def test_begin_transaction(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = transaction.Transaction( + id=b'id_blob', + ) + response = client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.BeginTransactionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
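+    # proto-plus messages expose typed attributes, so ``response.id`` comes
+    # back as native ``bytes`` rather than a wrapped protobuf value.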
+ assert isinstance(response, transaction.Transaction) + assert response.id == b'id_blob' + + +def test_begin_transaction_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.BeginTransactionRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.begin_transaction(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BeginTransactionRequest( + session='session_value', + ) + +def test_begin_transaction_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc + request = {} + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_begin_transaction_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.begin_transaction in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.begin_transaction] = mock_rpc + + request = {} + await client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.begin_transaction(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=spanner.BeginTransactionRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction(
+            id=b'id_blob',
+        ))
+        response = await client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.BeginTransactionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, transaction.Transaction)
+    assert response.id == b'id_blob'
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_async_from_dict():
+    await test_begin_transaction_async(request_type=dict)
+
+def test_begin_transaction_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.BeginTransactionRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        call.return_value = transaction.Transaction()
+        client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'session=session_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_begin_transaction_field_headers_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.BeginTransactionRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction())
+        await client.begin_transaction(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'session=session_value',
+    ) in kw['metadata']
+
+
+def test_begin_transaction_flattened():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = transaction.Transaction()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.begin_transaction(
+            session='session_value',
+            options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].session
+        mock_val = 'session_value'
+        assert arg == mock_val
+        arg = args[0].options
+        mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
+        assert arg == mock_val
+
+
+def test_begin_transaction_flattened_error():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.begin_transaction(
+            spanner.BeginTransactionRequest(),
+            session='session_value',
+            options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+@pytest.mark.asyncio
+async def test_begin_transaction_flattened_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.begin_transaction(
+            session='session_value',
+            options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].session
+        mock_val = 'session_value'
+        assert arg == mock_val
+        arg = args[0].options
+        mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_begin_transaction_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
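+    # Flattened keyword arguments are shorthand for building the request, so
+    # passing them alongside an explicit request object is ambiguous and the
+    # client raises ValueError rather than guessing which value wins.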
+ with pytest.raises(ValueError): + await client.begin_transaction( + spanner.BeginTransactionRequest(), + session='session_value', + options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner.CommitRequest, + dict, +]) +def test_commit(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = commit_response.CommitResponse( + ) + response = client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.CommitRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, commit_response.CommitResponse) + + +def test_commit_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.CommitRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.commit(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.CommitRequest( + session='session_value', + ) + +def test_commit_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.commit(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = SpannerAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.commit in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.commit] = mock_rpc
+
+        request = {}
+        await client.commit(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.commit(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_commit_async(transport: str = 'grpc_asyncio', request_type=spanner.CommitRequest):
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.commit),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse(
+        ))
+        response = await client.commit(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner.CommitRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, commit_response.CommitResponse)
+
+
+@pytest.mark.asyncio
+async def test_commit_async_from_dict():
+    await test_commit_async(request_type=dict)
+
+def test_commit_field_headers():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner.CommitRequest()
+
+    request.session = 'session_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.commit),
+            '__call__') as call:
+        call.return_value = commit_response.CommitResponse()
+        client.commit(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_commit_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.CommitRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) + await client.commit(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +def test_commit_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = commit_response.CommitResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.commit( + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = 'session_value' + assert arg == mock_val + arg = args[0].mutations + mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] + assert arg == mock_val + assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)) + + +def test_commit_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + spanner.CommitRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + +@pytest.mark.asyncio +async def test_commit_flattened_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. 
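+        # ``FakeUnaryUnaryCall`` (from google.api_core's grpc_helpers_async)
+        # wraps the response in an awaitable that stands in for a real
+        # unary-unary gRPC call.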
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.commit(
+            session='session_value',
+            transaction_id=b'transaction_id_blob',
+            mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))],
+            single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].session
+        mock_val = 'session_value'
+        assert arg == mock_val
+        arg = args[0].mutations
+        mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))]
+        assert arg == mock_val
+        assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
+
+@pytest.mark.asyncio
+async def test_commit_flattened_error_async():
+    client = SpannerAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.commit(
+            spanner.CommitRequest(),
+            session='session_value',
+            transaction_id=b'transaction_id_blob',
+            mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))],
+            single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.RollbackRequest,
+    dict,
+])
+def test_rollback(request_type, transport: str = 'grpc'):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.rollback),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.rollback(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner.RollbackRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_rollback_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner.RollbackRequest(
+        session='session_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.rollback(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.RollbackRequest( + session='session_value', + ) + +def test_rollback_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.rollback in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.rollback] = mock_rpc + + request = {} + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=spanner.RollbackRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.RollbackRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_rollback_async_from_dict(): + await test_rollback_async(request_type=dict) + +def test_rollback_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.RollbackRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = None + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rollback_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.RollbackRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.rollback(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +def test_rollback_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rollback( + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = 'session_value' + assert arg == mock_val + arg = args[0].transaction_id + mock_val = b'transaction_id_blob' + assert arg == mock_val + + +def test_rollback_flattened_error(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.rollback(
+ spanner.RollbackRequest(),
+ session='session_value',
+ transaction_id=b'transaction_id_blob',
+ )
+
+@pytest.mark.asyncio
+async def test_rollback_flattened_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.rollback),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.rollback(
+ session='session_value',
+ transaction_id=b'transaction_id_blob',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].session
+ mock_val = 'session_value'
+ assert arg == mock_val
+ arg = args[0].transaction_id
+ mock_val = b'transaction_id_blob'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_rollback_flattened_error_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.rollback(
+ spanner.RollbackRequest(),
+ session='session_value',
+ transaction_id=b'transaction_id_blob',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner.PartitionQueryRequest,
+ dict,
+])
+def test_partition_query(request_type, transport: str = 'grpc'):
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_query),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner.PartitionResponse(
+ )
+ response = client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner.PartitionQueryRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner.PartitionResponse)
+
+
+def test_partition_query_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner.PartitionQueryRequest(
+ session='session_value',
+ sql='sql_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_query),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.partition_query(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == spanner.PartitionQueryRequest(
+ session='session_value',
+ sql='sql_value',
+ )
+
+def test_partition_query_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.partition_query in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc
+ request = {}
+ client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.partition_query(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_partition_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.partition_query in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.partition_query] = mock_rpc
+
+ request = {}
+ await client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.partition_query(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionQueryRequest):
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_query),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse(
+ ))
+ response = await client.partition_query(request)
+
+ # Establish that the underlying gRPC stub method was called.
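+ # Unlike the sync test above, which asserts exactly one call, this only
+ # checks that at least one call was recorded.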
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.PartitionQueryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.PartitionResponse) + + +@pytest.mark.asyncio +async def test_partition_query_async_from_dict(): + await test_partition_query_async(request_type=dict) + +def test_partition_query_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionQueryRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_partition_query_field_headers_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.PartitionQueryRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) + await client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.PartitionReadRequest, + dict, +]) +def test_partition_read(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner.PartitionResponse( + ) + response = client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.PartitionReadRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
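+ # PartitionRead and PartitionQuery both return spanner.PartitionResponse;
+ # only their request types differ.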
+ assert isinstance(response, spanner.PartitionResponse) + + +def test_partition_read_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.PartitionReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.partition_read(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.PartitionReadRequest( + session='session_value', + table='table_value', + index='index_value', + ) + +def test_partition_read_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.partition_read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc + request = {} + client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.partition_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_partition_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.partition_read in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.partition_read] = mock_rpc + + request = {} + await client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. 
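+ # The cache is keyed by the bound transport method, so replacing the dict
+ # entry above intercepts every subsequent partition_read invocation.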
+ assert mock_rpc.call_count == 1
+
+ await client.partition_read(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_partition_read_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionReadRequest):
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_read),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse(
+ ))
+ response = await client.partition_read(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner.PartitionReadRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner.PartitionResponse)
+
+
+@pytest.mark.asyncio
+async def test_partition_read_async_from_dict():
+ await test_partition_read_async(request_type=dict)
+
+def test_partition_read_field_headers():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.PartitionReadRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_read),
+ '__call__') as call:
+ call.return_value = spanner.PartitionResponse()
+ client.partition_read(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'session=session_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_partition_read_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.PartitionReadRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.partition_read),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse())
+ await client.partition_read(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'session=session_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + spanner.BatchWriteRequest, + dict, +]) +def test_batch_write(request_type, transport: str = 'grpc'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([spanner.BatchWriteResponse()]) + response = client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner.BatchWriteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, spanner.BatchWriteResponse) + + +def test_batch_write_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner.BatchWriteRequest( + session='session_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.batch_write(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner.BatchWriteRequest( + session='session_value', + ) + +def test_batch_write_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_write in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc + request = {} + client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. 
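+ # wrapper_fn stays at zero below because the second invocation reuses the
+ # cached wrapped RPC instead of wrapping the method again.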
+ assert mock_rpc.call_count == 1 + + client.batch_write(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_write_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_write in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_write] = mock_rpc + + request = {} + await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_write(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_batch_write_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchWriteRequest): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()]) + response = await client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner.BatchWriteRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, spanner.BatchWriteResponse) + + +@pytest.mark.asyncio +async def test_batch_write_async_from_dict(): + await test_batch_write_async(request_type=dict) + +def test_batch_write_field_headers(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner.BatchWriteRequest() + + request.session = 'session_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value = iter([spanner.BatchWriteResponse()]) + client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
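+ # Routing information travels in the x-goog-request-params metadata entry,
+ # which lets the service route on the session name without parsing the body.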
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'session=session_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_batch_write_field_headers_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = spanner.BatchWriteRequest()
+
+ request.session = 'session_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_write),
+ '__call__') as call:
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+ call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()])
+ await client.batch_write(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'session=session_value',
+ ) in kw['metadata']
+
+
+def test_batch_write_flattened():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_write),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = iter([spanner.BatchWriteResponse()])
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.batch_write(
+ session='session_value',
+ mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].session
+ mock_val = 'session_value'
+ assert arg == mock_val
+ arg = args[0].mutation_groups
+ mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])]
+ assert arg == mock_val
+
+
+def test_batch_write_flattened_error():
+ client = SpannerClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.batch_write(
+ spanner.BatchWriteRequest(),
+ session='session_value',
+ mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
+ )
+
+@pytest.mark.asyncio
+async def test_batch_write_flattened_async():
+ client = SpannerAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.batch_write),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.batch_write( + session='session_value', + mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].session + mock_val = 'session_value' + assert arg == mock_val + arg = args[0].mutation_groups + mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_batch_write_flattened_error_async(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_write( + spanner.BatchWriteRequest(), + session='session_value', + mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], + ) + + +def test_create_session_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_session] = mock_rpc + + request = {} + client.create_session(request) + + # Establish that the underlying gRPC stub method was called. 
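+ # On the REST transport the wrapped callable issues an HTTP request rather
+ # than a gRPC stub call, but the caching contract is identical.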
+ assert mock_rpc.call_count == 1 + + client.create_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_session_rest_required_fields(request_type=spanner.CreateSessionRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.Session() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_session(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_session_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_session._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", "session", ))) + + +def test_create_session_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = spanner.Session() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_session(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1]) + + +def test_create_session_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_session( + spanner.CreateSessionRequest(), + database='database_value', + ) + + +def test_batch_create_sessions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_create_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc + + request = {} + client.batch_create_sessions(request) + + # Establish that the underlying gRPC stub method was called. 
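+ # An empty dict is accepted here because the client coerces dict requests
+ # into the request message before invoking the wrapped RPC.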
+ assert mock_rpc.call_count == 1 + + client.batch_create_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_create_sessions_rest_required_fields(request_type=spanner.BatchCreateSessionsRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["database"] = "" + request_init["session_count"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + jsonified_request["sessionCount"] = 1420 + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + assert "sessionCount" in jsonified_request + assert jsonified_request["sessionCount"] == 1420 + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.BatchCreateSessionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
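+ # Pinning the uri, method, and query_params keeps this test independent of
+ # the real http_options routing for the method.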
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.BatchCreateSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.batch_create_sessions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_batch_create_sessions_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.batch_create_sessions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", "sessionCount", ))) + + +def test_batch_create_sessions_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner.BatchCreateSessionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + session_count=1420, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner.BatchCreateSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.batch_create_sessions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate" % client.transport._host, args[1]) + + +def test_batch_create_sessions_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_create_sessions( + spanner.BatchCreateSessionsRequest(), + database='database_value', + session_count=1420, + ) + + +def test_get_session_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_session] = mock_rpc + + request = {} + client.get_session(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_session_rest_required_fields(request_type=spanner.GetSessionRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.Session() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_session(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_session_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_session._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_session_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner.Session() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_session(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1]) + + +def test_get_session_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
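+ # The request-vs-flattened check lives in the client layer, so it behaves
+ # the same on the REST transport as on gRPC.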
+ with pytest.raises(ValueError): + client.get_session( + spanner.GetSessionRequest(), + name='name_value', + ) + + +def test_list_sessions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sessions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc + + request = {} + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sessions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_sessions_rest_required_fields(request_type=spanner.ListSessionsRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.ListSessionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_sessions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_sessions_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_sessions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("database", ))) + + +def test_list_sessions_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner.ListSessionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_sessions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1]) + + +def test_list_sessions_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sessions( + spanner.ListSessionsRequest(), + database='database_value', + ) + + +def test_list_sessions_rest_pager(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + spanner.Session(), + ], + next_page_token='abc', + ), + spanner.ListSessionsResponse( + sessions=[], + next_page_token='def', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + ], + next_page_token='ghi', + ), + spanner.ListSessionsResponse( + sessions=[ + spanner.Session(), + spanner.Session(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(spanner.ListSessionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + pager = client.list_sessions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, spanner.Session) + for i in results) + + pages = list(client.list_sessions(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_delete_session_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_session in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc + + request = {} + client.delete_session(request) + + # Establish that the underlying gRPC stub method was called. 
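+ # DeleteSession returns Empty, so the test verifies invocation counts only,
+ # never a response payload.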
+ assert mock_rpc.call_count == 1 + + client.delete_session(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_session_rest_required_fields(request_type=spanner.DeleteSessionRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_session(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_session_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_session._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_session_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_session(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1]) + + +def test_delete_session_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_session( + spanner.DeleteSessionRequest(), + name='name_value', + ) + + +def test_execute_sql_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc + + request = {} + client.execute_sql(request) + + # Establish that the underlying gRPC stub method was called. 
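+        # (The *_rest_flattened tests above verify routing rather than
+        # payloads: they take the single mocked Session.request call and run
+        # path_template.validate against the method's http rule.  Sketch:
+        #
+        #     _, args, _ = req.mock_calls[0]
+        #     assert path_template.validate(
+        #         "%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}"
+        #         % client.transport._host, args[1])
+        # )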
+ assert mock_rpc.call_count == 1 + + client.execute_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["sql"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["sql"] = 'sql_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "sql" in jsonified_request + assert jsonified_request["sql"] == 'sql_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.ResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
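+            # The companion *_unset_required_fields tests assert against
+            # `set(default_tuple) & set(required_tuple)`; the default tuple is
+            # empty for these methods, so the intersection -- and therefore the
+            # expected result of _get_unset_required_fields({}) -- is the
+            # empty set.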
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = result_set.ResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.execute_sql(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_execute_sql_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.execute_sql._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "sql", ))) + + +def test_execute_streaming_sql_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_streaming_sql in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc + + request = {} + client.execute_streaming_sql(request) + + # Establish that the underlying gRPC stub method was called. 
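+        # (In the required-fields tests above, the fake payload is produced by
+        # dropping to the raw protobuf type first -- result_set.ResultSet.pb()
+        # -- because json_format.MessageToJson accepts protobuf messages, not
+        # their proto-plus wrappers.  Sketch:
+        #
+        #     return_value = result_set.ResultSet()
+        #     pb = result_set.ResultSet.pb(return_value)
+        #     body = json_format.MessageToJson(pb)
+        # )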
+ assert mock_rpc.call_count == 1 + + client.execute_streaming_sql(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_streaming_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["sql"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["sql"] = 'sql_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "sql" in jsonified_request + assert jsonified_request["sql"] == 'sql_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.PartialResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = result_set.PartialResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.execute_streaming_sql(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_execute_streaming_sql_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.execute_streaming_sql._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "sql", ))) + + +def test_execute_batch_dml_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.execute_batch_dml in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc + + request = {} + client.execute_batch_dml(request) + + # Establish that the underlying gRPC stub method was called. 
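+        # Server-streaming REST methods (ExecuteStreamingSql, StreamingRead,
+        # BatchWrite) are faked by wrapping the JSON message in a one-element
+        # array and replaying it through iter_content, which the rest
+        # transport consumes chunk by chunk.  Sketch, assuming `msg` is an
+        # already-serialized JSON message:
+        #
+        #     body = "[{}]".format(msg)
+        #     response_value._content = body.encode('UTF-8')
+        #     with mock.patch.object(response_value, 'iter_content') as it:
+        #         it.return_value = iter(body)   # one-character chunks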
+ assert mock_rpc.call_count == 1 + + client.execute_batch_dml(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_execute_batch_dml_rest_required_fields(request_type=spanner.ExecuteBatchDmlRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["seqno"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["seqno"] = 550 + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "seqno" in jsonified_request + assert jsonified_request["seqno"] == 550 + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.ExecuteBatchDmlResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
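+            # proto3 scalar defaults are omitted by json_format.MessageToJson,
+            # which is what the "fields with default values are dropped" step
+            # above relies on: seqno was initialized to 0 and disappears from
+            # the jsonified request, so the test writes the non-default 550
+            # back in.  For instance:
+            #
+            #     msg = spanner.ExecuteBatchDmlRequest(session='s', seqno=0)
+            #     json_format.MessageToJson(spanner.ExecuteBatchDmlRequest.pb(msg))
+            #     # -> '{\n  "session": "s"\n}'  (no "seqno" key)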
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.execute_batch_dml(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_execute_batch_dml_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.execute_batch_dml._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "transaction", "statements", "seqno", ))) + + +def test_read_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.read] = mock_rpc + + request = {} + client.read(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_read_rest_required_fields(request_type=spanner.ReadRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["table"] = "" + request_init["columns"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["table"] = 'table_value' + jsonified_request["columns"] = 'columns_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "table" in jsonified_request + assert jsonified_request["table"] == 'table_value' + assert "columns" in jsonified_request + assert jsonified_request["columns"] == 'columns_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.ResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = result_set.ResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.read(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_read_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.read._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", ))) + + +def test_streaming_read_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.streaming_read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc + + request = {} + client.streaming_read(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.streaming_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_streaming_read_rest_required_fields(request_type=spanner.ReadRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["table"] = "" + request_init["columns"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["table"] = 'table_value' + jsonified_request["columns"] = 'columns_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "table" in jsonified_request + assert jsonified_request["table"] == 'table_value' + assert "columns" in jsonified_request + assert jsonified_request["columns"] == 'columns_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = result_set.PartialResultSet() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = result_set.PartialResultSet.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.streaming_read(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_streaming_read_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.streaming_read._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", ))) + + +def test_begin_transaction_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.begin_transaction in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc + + request = {} + client.begin_transaction(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.begin_transaction(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_begin_transaction_rest_required_fields(request_type=spanner.BeginTransactionRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transaction.Transaction() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = transaction.Transaction.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.begin_transaction(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_begin_transaction_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.begin_transaction._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "options", ))) + + +def test_begin_transaction_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = transaction.Transaction() + + # get arguments that satisfy an http rule for this method + sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + session='session_value', + options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = transaction.Transaction.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.begin_transaction(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction" % client.transport._host, args[1]) + + +def test_begin_transaction_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.begin_transaction( + spanner.BeginTransactionRequest(), + session='session_value', + options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + +def test_commit_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.commit] = mock_rpc + + request = {} + client.commit(request) + + # Establish that the underlying gRPC stub method was called. 
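+        # (For message-typed flattened fields the begin_transaction tests
+        # above build a fully populated oneof --
+        # TransactionOptions(read_write=ReadWrite(read_lock_mode=PESSIMISTIC))
+        # -- matching the "get truthy value for each flattened field" rule.)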
+ assert mock_rpc.call_count == 1 + + client.commit(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_commit_rest_required_fields(request_type=spanner.CommitRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = commit_response.CommitResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = commit_response.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.commit(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_commit_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.commit._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", ))) + + +def test_commit_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = commit_response.CommitResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + session='session_value', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = commit_response.CommitResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.commit(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit" % client.transport._host, args[1]) + + +def test_commit_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit( + spanner.CommitRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], + single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), + ) + + +def test_rollback_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.rollback] = mock_rpc + + request = {} + client.rollback(request) + + # Establish that the underlying gRPC stub method was called. 
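+        # (test_commit_rest_flattened_error above passes a request object
+        # together with flattened fields -- including transaction_id and
+        # single_use_transaction, which share CommitRequest's "transaction"
+        # oneof -- and it is the request-plus-flattened mix that raises
+        # ValueError.)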
+ assert mock_rpc.call_count == 1 + + client.rollback(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_rollback_rest_required_fields(request_type=spanner.RollbackRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["transaction_id"] = b'' + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["transactionId"] = b'transaction_id_blob' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "transactionId" in jsonified_request + assert jsonified_request["transactionId"] == b'transaction_id_blob' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.rollback(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_rollback_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.rollback._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "transactionId", ))) + + +def test_rollback_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
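+        # Rollback, like DeleteSession, returns Empty, hence the None below.
+        # Note that the required-fields test above writes raw bytes
+        # (b'transaction_id_blob') into jsonified_request even though JSON
+        # would carry bytes base64-encoded; that is harmless here because the
+        # dict is only passed to _get_unset_required_fields and asserted on,
+        # never serialized back through json_format.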
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + session='session_value', + transaction_id=b'transaction_id_blob', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.rollback(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback" % client.transport._host, args[1]) + + +def test_rollback_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback( + spanner.RollbackRequest(), + session='session_value', + transaction_id=b'transaction_id_blob', + ) + + +def test_partition_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.partition_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc + + request = {} + client.partition_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.partition_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_partition_query_rest_required_fields(request_type=spanner.PartitionQueryRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["sql"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["sql"] = 'sql_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "sql" in jsonified_request + assert jsonified_request["sql"] == 'sql_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.PartitionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.partition_query(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_partition_query_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.partition_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "sql", ))) + + +def test_partition_read_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.partition_read in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc + + request = {} + client.partition_read(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.partition_read(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_partition_read_rest_required_fields(request_type=spanner.PartitionReadRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request_init["table"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + jsonified_request["table"] = 'table_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + assert "table" in jsonified_request + assert jsonified_request["table"] == 'table_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.PartitionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.PartitionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.partition_read(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_partition_read_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.partition_read._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "table", "keySet", ))) + + +def test_batch_write_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_write in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc + + request = {} + client.batch_write(request) + + # Establish that the underlying gRPC stub method was called. 
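+        # (The unset-required-fields assertions above use lowerCamelCase JSON
+        # names -- "keySet", "mutationGroups" -- rather than proto snake_case,
+        # suggesting _get_unset_required_fields inspects the transcoded
+        # query-param representation of the request.)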
+ assert mock_rpc.call_count == 1 + + client.batch_write(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_write_rest_required_fields(request_type=spanner.BatchWriteRequest): + transport_class = transports.SpannerRestTransport + + request_init = {} + request_init["session"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["session"] = 'session_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "session" in jsonified_request + assert jsonified_request["session"] == 'session_value' + + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner.BatchWriteResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.batch_write(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_batch_write_rest_unset_required_fields(): + transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.batch_write._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("session", "mutationGroups", ))) + + +def test_batch_write_rest_flattened(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner.BatchWriteResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + session='session_value', + mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner.BatchWriteResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + client.batch_write(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite" % client.transport._host, args[1]) + + +def test_batch_write_rest_flattened_error(transport: str = 'rest'): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_write( + spanner.BatchWriteRequest(), + session='session_value', + mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpannerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpannerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpannerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SpannerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpannerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SpannerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.SpannerGrpcTransport, + transports.SpannerGrpcAsyncIOTransport, + transports.SpannerRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = SpannerClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
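+# With request=None the client should construct a default (empty) request
+# message of the right type and still invoke the transport; each empty-call
+# test asserts that the stubbed method saw exactly that default request.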
+def test_create_session_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + call.return_value = spanner.Session() + client.create_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CreateSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_create_sessions_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + call.return_value = spanner.BatchCreateSessionsResponse() + client.batch_create_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchCreateSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_session_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + call.return_value = spanner.Session() + client.get_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.GetSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sessions_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + call.return_value = spanner.ListSessionsResponse() + client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ListSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_session_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + call.return_value = None + client.delete_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.DeleteSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
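+# Unary methods are stubbed with a single response message; the
+# server-streaming methods further below (execute_streaming_sql,
+# streaming_read, batch_write) are stubbed with an iterator of responses.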
+def test_execute_sql_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + call.return_value = result_set.ResultSet() + client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_streaming_sql_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.execute_streaming_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_execute_batch_dml_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + call.return_value = spanner.ExecuteBatchDmlResponse() + client.execute_batch_dml(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteBatchDmlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_read_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + call.return_value = result_set.ResultSet() + client.read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_streaming_read_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + call.return_value = iter([result_set.PartialResultSet()]) + client.streaming_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_begin_transaction_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + call.return_value = transaction.Transaction() + client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + call.return_value = commit_response.CommitResponse() + client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + call.return_value = None + client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_query_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_partition_read_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + call.return_value = spanner.PartitionResponse() + client.partition_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_batch_write_empty_call_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + call.return_value = iter([spanner.BatchWriteResponse()]) + client.batch_write(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchWriteRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SpannerAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_session_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + )) + await client.create_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CreateSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_create_sessions_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse( + )) + await client.batch_create_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchCreateSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_session_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_session), + '__call__') as call: + # Designate an appropriate return value for the call. 
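+        # FakeUnaryUnaryCall wraps the message in an awaitable that mimics a
+        # real grpc.aio unary call, so the async client can await it.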
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + )) + await client.get_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.GetSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sessions_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_sessions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ListSessionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_session_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_session), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_session(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.DeleteSessionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_sql_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_sql), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( + )) + await client.execute_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_streaming_sql_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_streaming_sql), + '__call__') as call: + # Designate an appropriate return value for the call. 
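+        # Server-streaming calls are faked with an aio.UnaryStreamCall mock
+        # whose read() returns a single PartialResultSet before the stream
+        # is exhausted.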
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.execute_streaming_sql(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteSqlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_execute_batch_dml_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.execute_batch_dml), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse( + )) + await client.execute_batch_dml(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ExecuteBatchDmlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_read_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet( + )) + await client.read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_streaming_read_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.streaming_read), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) + await client.streaming_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.ReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_begin_transaction_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.begin_transaction), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction( + id=b'id_blob', + )) + await client.begin_transaction(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BeginTransactionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_commit_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.commit), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse( + )) + await client.commit(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.CommitRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_rollback_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.rollback), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.rollback(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.RollbackRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_partition_query_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( + )) + await client.partition_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_partition_read_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.partition_read), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse( + )) + await client.partition_read(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.PartitionReadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_write_empty_call_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_write), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()]) + await client.batch_write(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner.BatchWriteRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SpannerClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_session_rest_bad_request(request_type=spanner.CreateSessionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_session(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.CreateSessionRequest, + dict, +]) +def test_create_session_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
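+        # REST responses are faked by serializing the expected proto to JSON
+        # and handing the bytes to the mocked session as response.content;
+        # the transport parses them back into a Session.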
+ return_value = spanner.Session( + name='name_value', + creator_role='creator_role_value', + multiplexed=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.Session.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_session(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_session_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_create_session") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_create_session_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_create_session") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.CreateSessionRequest.pb(spanner.CreateSessionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.Session.to_json(spanner.Session()) + req.return_value.content = return_value + + request = spanner.CreateSessionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.Session() + post_with_metadata.return_value = spanner.Session(), metadata + + client.create_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_create_sessions_rest_bad_request(request_type=spanner.BatchCreateSessionsRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
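+    # A 400 status on the mocked session is surfaced by the transport as
+    # google.api_core's BadRequest exception.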
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.batch_create_sessions(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.BatchCreateSessionsRequest,
+    dict,
+])
+def test_batch_create_sessions_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.BatchCreateSessionsResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.BatchCreateSessionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.batch_create_sessions(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.BatchCreateSessionsResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_batch_create_sessions_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions") as post, \
+        mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_create_sessions") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.BatchCreateSessionsRequest.pb(spanner.BatchCreateSessionsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner.BatchCreateSessionsResponse.to_json(spanner.BatchCreateSessionsResponse())
+        req.return_value.content = return_value
+
+        request = spanner.BatchCreateSessionsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner.BatchCreateSessionsResponse()
+        post_with_metadata.return_value = spanner.BatchCreateSessionsResponse(), metadata
+
+        client.batch_create_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_session_rest_bad_request(request_type=spanner.GetSessionRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.get_session(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.GetSessionRequest,
+    dict,
+])
+def test_get_session_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.Session(
+            name='name_value',
+            creator_role='creator_role_value',
+            multiplexed=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.Session.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.get_session(request)
+
+    # Establish that the response is the type that we expect.
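+    # The field values round-trip through the JSON encoding above, so the
+    # decoded Session should match the fake response exactly.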
+ assert isinstance(response, spanner.Session) + assert response.name == 'name_value' + assert response.creator_role == 'creator_role_value' + assert response.multiplexed is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_session_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_get_session") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_get_session_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_get_session") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.GetSessionRequest.pb(spanner.GetSessionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.Session.to_json(spanner.Session()) + req.return_value.content = return_value + + request = spanner.GetSessionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.Session() + post_with_metadata.return_value = spanner.Session(), metadata + + client.get_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_sessions_rest_bad_request(request_type=spanner.ListSessionsRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_sessions(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.ListSessionsRequest, + dict, +]) +def test_list_sessions_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
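+        # list_sessions wraps the response in a pager; the next_page_token
+        # set here is what would drive fetching the next page if the pager
+        # were iterated further.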
+ return_value = spanner.ListSessionsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner.ListSessionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_sessions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_sessions_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_list_sessions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.ListSessionsRequest.pb(spanner.ListSessionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner.ListSessionsResponse.to_json(spanner.ListSessionsResponse()) + req.return_value.content = return_value + + request = spanner.ListSessionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner.ListSessionsResponse() + post_with_metadata.return_value = spanner.ListSessionsResponse(), metadata + + client.list_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_session_rest_bad_request(request_type=spanner.DeleteSessionRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_session(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.DeleteSessionRequest, + dict, +]) +def test_delete_session_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_session(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_session_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_delete_session") as pre: + pre.assert_not_called() + pb_message = spanner.DeleteSessionRequest.pb(spanner.DeleteSessionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner.DeleteSessionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_execute_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.execute_sql(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ExecuteSqlRequest,
+    dict,
+])
+def test_execute_sql_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.ResultSet(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.ResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.execute_sql(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_execute_sql_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql") as post, \
+        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_sql") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = result_set.ResultSet.to_json(result_set.ResultSet())
+        req.return_value.content = return_value
+
+        request = spanner.ExecuteSqlRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = result_set.ResultSet()
+        post_with_metadata.return_value = result_set.ResultSet(), metadata
+
+        client.execute_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_execute_streaming_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.execute_streaming_sql(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ExecuteSqlRequest,
+    dict,
+])
+def test_execute_streaming_sql_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.PartialResultSet(
+            chunked_value=True,
+            resume_token=b'resume_token_blob',
+            last=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.PartialResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        json_return_value = "[{}]".format(json_return_value)
+        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.execute_streaming_sql(request)
+
+    assert isinstance(response, Iterable)
+    response = next(response)
+
+    # Establish that the response is the type that we expect.
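+    # Over REST the stream arrives as a JSON array ("[{...}]") consumed via
+    # iter_content; the client decodes each element into a PartialResultSet,
+    # and next() above pulled the first (and only) one.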
+ assert isinstance(response, result_set.PartialResultSet) + assert response.chunked_value is True + assert response.resume_token == b'resume_token_blob' + assert response.last is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_execute_streaming_sql_rest_interceptors(null_interceptor): + transport = transports.SpannerRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SpannerRestInterceptor(), + ) + client = SpannerClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql") as post, \ + mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_streaming_sql") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet()) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = spanner.ExecuteSqlRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = result_set.PartialResultSet() + post_with_metadata.return_value = result_set.PartialResultSet(), metadata + + client.execute_streaming_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_execute_batch_dml_rest_bad_request(request_type=spanner.ExecuteBatchDmlRequest): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.execute_batch_dml(request) + + +@pytest.mark.parametrize("request_type", [ + spanner.ExecuteBatchDmlRequest, + dict, +]) +def test_execute_batch_dml_rest_call_success(request_type): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_execute_streaming_sql_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_streaming_sql") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet())
+        req.return_value.iter_content = mock.Mock(return_value=iter(return_value))
+
+        request = spanner.ExecuteSqlRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = result_set.PartialResultSet()
+        post_with_metadata.return_value = result_set.PartialResultSet(), metadata
+
+        client.execute_streaming_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_execute_batch_dml_rest_bad_request(request_type=spanner.ExecuteBatchDmlRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.execute_batch_dml(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ExecuteBatchDmlRequest,
+    dict,
+])
+def test_execute_batch_dml_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.ExecuteBatchDmlResponse()
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.ExecuteBatchDmlResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.execute_batch_dml(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.ExecuteBatchDmlResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_execute_batch_dml_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_batch_dml") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.ExecuteBatchDmlRequest.pb(spanner.ExecuteBatchDmlRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner.ExecuteBatchDmlResponse.to_json(spanner.ExecuteBatchDmlResponse())
+        req.return_value.content = return_value
+
+        request = spanner.ExecuteBatchDmlRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner.ExecuteBatchDmlResponse()
+        post_with_metadata.return_value = spanner.ExecuteBatchDmlResponse(), metadata
+
+        client.execute_batch_dml(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_read_rest_bad_request(request_type=spanner.ReadRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.read(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ReadRequest,
+    dict,
+])
+def test_read_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.ResultSet()
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.ResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.read(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.ResultSet)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_read_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_read") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_read_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_read") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.ReadRequest.pb(spanner.ReadRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = result_set.ResultSet.to_json(result_set.ResultSet())
+        req.return_value.content = return_value
+
+        request = spanner.ReadRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = result_set.ResultSet()
+        post_with_metadata.return_value = result_set.ResultSet(), metadata
+
+        client.read(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_streaming_read_rest_bad_request(request_type=spanner.ReadRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.streaming_read(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.ReadRequest,
+    dict,
+])
+def test_streaming_read_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = result_set.PartialResultSet(
+            chunked_value=True,
+            resume_token=b'resume_token_blob',
+            last=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = result_set.PartialResultSet.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        json_return_value = "[{}]".format(json_return_value)
+        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.streaming_read(request)
+
+    assert isinstance(response, Iterable)
+    response = next(response)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, result_set.PartialResultSet)
+    assert response.chunked_value is True
+    assert response.resume_token == b'resume_token_blob'
+    assert response.last is True
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_streaming_read_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_streaming_read") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.ReadRequest.pb(spanner.ReadRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet())
+        req.return_value.iter_content = mock.Mock(return_value=iter(return_value))
+
+        request = spanner.ReadRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = result_set.PartialResultSet()
+        post_with_metadata.return_value = result_set.PartialResultSet(), metadata
+
+        client.streaming_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_begin_transaction_rest_bad_request(request_type=spanner.BeginTransactionRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.begin_transaction(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.BeginTransactionRequest,
+    dict,
+])
+def test_begin_transaction_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = transaction.Transaction(
+            id=b'id_blob',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = transaction.Transaction.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.begin_transaction(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, transaction.Transaction)
+    assert response.id == b'id_blob'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_begin_transaction_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_begin_transaction") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.BeginTransactionRequest.pb(spanner.BeginTransactionRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = transaction.Transaction.to_json(transaction.Transaction())
+        req.return_value.content = return_value
+
+        request = spanner.BeginTransactionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = transaction.Transaction()
+        post_with_metadata.return_value = transaction.Transaction(), metadata
+
+        client.begin_transaction(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_commit_rest_bad_request(request_type=spanner.CommitRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.commit(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.CommitRequest,
+    dict,
+])
+def test_commit_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = commit_response.CommitResponse()
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = commit_response.CommitResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.commit(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, commit_response.CommitResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_commit_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_commit") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_commit_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_commit") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.CommitRequest.pb(spanner.CommitRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = commit_response.CommitResponse.to_json(commit_response.CommitResponse())
+        req.return_value.content = return_value
+
+        request = spanner.CommitRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = commit_response.CommitResponse()
+        post_with_metadata.return_value = commit_response.CommitResponse(), metadata
+
+        client.commit(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
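+# NOTE: every *_rest_interceptors test follows the same recipe: transcoding and the
+# HTTP session are both mocked out, so only the interceptor hooks actually run; the
+# assertions then verify that pre_<method> fires before the request is sent and that
+# post_<method> / post_<method>_with_metadata fire on the mocked response.
+
+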
+def test_rollback_rest_bad_request(request_type=spanner.RollbackRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.rollback(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.RollbackRequest,
+    dict,
+])
+def test_rollback_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.rollback(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_rollback_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_rollback") as pre:
+        pre.assert_not_called()
+        pb_message = spanner.RollbackRequest.pb(spanner.RollbackRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        request = spanner.RollbackRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.rollback(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+
+
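+# NOTE: Rollback maps to google.protobuf.Empty (the client returns None), so there
+# is no response to post-process; unlike the other interceptor tests, only the
+# pre_rollback hook is patched and asserted above.
+
+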
+def test_partition_query_rest_bad_request(request_type=spanner.PartitionQueryRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.partition_query(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.PartitionQueryRequest,
+    dict,
+])
+def test_partition_query_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.PartitionResponse()
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.PartitionResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.partition_query(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.PartitionResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_partition_query_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_query") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.PartitionQueryRequest.pb(spanner.PartitionQueryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse())
+        req.return_value.content = return_value
+
+        request = spanner.PartitionQueryRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner.PartitionResponse()
+        post_with_metadata.return_value = spanner.PartitionResponse(), metadata
+
+        client.partition_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_partition_read_rest_bad_request(request_type=spanner.PartitionReadRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.partition_read(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.PartitionReadRequest,
+    dict,
+])
+def test_partition_read_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.PartitionResponse()
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.PartitionResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.partition_read(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.PartitionResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_partition_read_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_read") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.PartitionReadRequest.pb(spanner.PartitionReadRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse())
+        req.return_value.content = return_value
+
+        request = spanner.PartitionReadRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner.PartitionResponse()
+        post_with_metadata.return_value = spanner.PartitionResponse(), metadata
+
+        client.partition_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_batch_write_rest_bad_request(request_type=spanner.BatchWriteRequest):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.batch_write(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner.BatchWriteRequest,
+    dict,
+])
+def test_batch_write_rest_call_success(request_type):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner.BatchWriteResponse(
+            indexes=[752],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner.BatchWriteResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        json_return_value = "[{}]".format(json_return_value)
+        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.batch_write(request)
+
+    assert isinstance(response, Iterable)
+    response = next(response)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner.BatchWriteResponse)
+    assert response.indexes == [752]
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_batch_write_rest_interceptors(null_interceptor):
+    transport = transports.SpannerRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
+    )
+    client = SpannerClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+            mock.patch.object(path_template, "transcode") as transcode, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write") as post, \
+            mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write_with_metadata") as post_with_metadata, \
+            mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_write") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner.BatchWriteRequest.pb(spanner.BatchWriteRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner.BatchWriteResponse.to_json(spanner.BatchWriteResponse())
+        req.return_value.iter_content = mock.Mock(return_value=iter(return_value))
+
+        request = spanner.BatchWriteRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner.BatchWriteResponse()
+        post_with_metadata.return_value = spanner.BatchWriteResponse(), metadata
+
+        client.batch_write(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_initialize_client_w_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_session_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_session),
+            '__call__') as call:
+        client.create_session(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.CreateSessionRequest()
+
+        assert args[0] == request_msg
+
+
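+# NOTE: each *_empty_call_rest test patches the transport method itself, so no HTTP
+# traffic occurs; the only behavior checked is that request=None is expanded into
+# the method's default request message.
+
+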
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_batch_create_sessions_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_create_sessions),
+            '__call__') as call:
+        client.batch_create_sessions(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.BatchCreateSessionsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_session_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_session),
+            '__call__') as call:
+        client.get_session(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.GetSessionRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_sessions_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_sessions),
+            '__call__') as call:
+        client.list_sessions(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.ListSessionsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_session_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_session),
+            '__call__') as call:
+        client.delete_session(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.DeleteSessionRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_execute_sql_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_sql),
+            '__call__') as call:
+        client.execute_sql(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.ExecuteSqlRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_execute_streaming_sql_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_streaming_sql),
+            '__call__') as call:
+        client.execute_streaming_sql(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.ExecuteSqlRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_execute_batch_dml_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.execute_batch_dml),
+            '__call__') as call:
+        client.execute_batch_dml(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.ExecuteBatchDmlRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_read_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.read),
+            '__call__') as call:
+        client.read(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.ReadRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_streaming_read_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.streaming_read),
+            '__call__') as call:
+        client.streaming_read(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.ReadRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_begin_transaction_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.begin_transaction),
+            '__call__') as call:
+        client.begin_transaction(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.BeginTransactionRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_commit_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.commit),
+            '__call__') as call:
+        client.commit(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.CommitRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_rollback_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.rollback),
+            '__call__') as call:
+        client.rollback(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.RollbackRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_partition_query_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.partition_query),
+            '__call__') as call:
+        client.partition_query(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.PartitionQueryRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_partition_read_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.partition_read),
+            '__call__') as call:
+        client.partition_read(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.PartitionReadRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_batch_write_empty_call_rest():
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.batch_write),
+            '__call__') as call:
+        client.batch_write(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner.BatchWriteRequest()
+
+        assert args[0] == request_msg
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.SpannerGrpcTransport,
+    )
+
+
+def test_spanner_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.SpannerTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
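+# NOTE: combining `credentials` with `credentials_file` is rejected at the base
+# transport layer with DuplicateCredentialArgs; the tests that follow exercise the
+# credentials-file and ADC (google.auth.default) paths individually.
+
+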
+def test_spanner_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.SpannerTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'create_session',
+        'batch_create_sessions',
+        'get_session',
+        'list_sessions',
+        'delete_session',
+        'execute_sql',
+        'execute_streaming_sql',
+        'execute_batch_dml',
+        'read',
+        'streaming_read',
+        'begin_transaction',
+        'commit',
+        'rollback',
+        'partition_query',
+        'partition_read',
+        'batch_write',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_spanner_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.SpannerTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.data',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_spanner_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.SpannerTransport()
+        adc.assert_called_once()
+
+
+def test_spanner_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        SpannerClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.data',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SpannerGrpcTransport,
+        transports.SpannerGrpcAsyncIOTransport,
+    ],
+)
+def test_spanner_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.data',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SpannerGrpcTransport,
+        transports.SpannerGrpcAsyncIOTransport,
+        transports.SpannerRestTransport,
+    ],
+)
+def test_spanner_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.SpannerGrpcTransport, grpc_helpers),
+        (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_spanner_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "spanner.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.data',
+            ),
+            scopes=["1", "2"],
+            default_host="spanner.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
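+# NOTE: the expected channel options set grpc.max_send_message_length and
+# grpc.max_receive_message_length to -1, which gRPC treats as "no limit"; Spanner
+# result streams can easily exceed gRPC's default 4 MiB receive cap.
+
+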
+@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport])
+def test_spanner_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_spanner_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.SpannerRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_spanner_host_no_port(transport_name):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'spanner.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://spanner.googleapis.com'
+    )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_spanner_host_with_port(transport_name):
+    client = SpannerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'spanner.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://spanner.googleapis.com:8000'
+    )
+
+
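+# NOTE: endpoint handling differs per transport, as the two host tests above show:
+# gRPC transports keep a bare host:port (defaulting the port to 443), while the
+# REST transport prefixes the host with https:// to form a URL.
+
+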
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_spanner_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = SpannerClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = SpannerClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.create_session._session
+    session2 = client2.transport.create_session._session
+    assert session1 != session2
+    session1 = client1.transport.batch_create_sessions._session
+    session2 = client2.transport.batch_create_sessions._session
+    assert session1 != session2
+    session1 = client1.transport.get_session._session
+    session2 = client2.transport.get_session._session
+    assert session1 != session2
+    session1 = client1.transport.list_sessions._session
+    session2 = client2.transport.list_sessions._session
+    assert session1 != session2
+    session1 = client1.transport.delete_session._session
+    session2 = client2.transport.delete_session._session
+    assert session1 != session2
+    session1 = client1.transport.execute_sql._session
+    session2 = client2.transport.execute_sql._session
+    assert session1 != session2
+    session1 = client1.transport.execute_streaming_sql._session
+    session2 = client2.transport.execute_streaming_sql._session
+    assert session1 != session2
+    session1 = client1.transport.execute_batch_dml._session
+    session2 = client2.transport.execute_batch_dml._session
+    assert session1 != session2
+    session1 = client1.transport.read._session
+    session2 = client2.transport.read._session
+    assert session1 != session2
+    session1 = client1.transport.streaming_read._session
+    session2 = client2.transport.streaming_read._session
+    assert session1 != session2
+    session1 = client1.transport.begin_transaction._session
+    session2 = client2.transport.begin_transaction._session
+    assert session1 != session2
+    session1 = client1.transport.commit._session
+    session2 = client2.transport.commit._session
+    assert session1 != session2
+    session1 = client1.transport.rollback._session
+    session2 = client2.transport.rollback._session
+    assert session1 != session2
+    session1 = client1.transport.partition_query._session
+    session2 = client2.transport.partition_query._session
+    assert session1 != session2
+    session1 = client1.transport.partition_read._session
+    session2 = client2.transport.partition_read._session
+    assert session1 != session2
+    session1 = client1.transport.batch_write._session
+    session2 = client2.transport.batch_write._session
+    assert session1 != session2
+
+
+def test_spanner_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SpannerGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_spanner_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SpannerGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport])
+def test_spanner_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) +def test_spanner_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_database_path(): + project = "squid" + instance = "clam" + database = "whelk" + expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + actual = SpannerClient.database_path(project, instance, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "octopus", + "instance": "oyster", + "database": "nudibranch", + } + path = SpannerClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_database_path(path) + assert expected == actual + +def test_session_path(): + project = "cuttlefish" + instance = "mussel" + database = "winkle" + session = "nautilus" + expected = "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) + actual = SpannerClient.session_path(project, instance, database, session) + assert expected == actual + + +def test_parse_session_path(): + expected = { + "project": "scallop", + "instance": "abalone", + "database": "squid", + "session": "clam", + } + path = SpannerClient.session_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_session_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SpannerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SpannerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = SpannerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SpannerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SpannerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SpannerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SpannerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = SpannerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SpannerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SpannerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SpannerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SpannerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: + transport_class = SpannerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_grpc(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SpannerAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = SpannerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SpannerClient, transports.SpannerGrpcTransport), + (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/spanner_admin_database/v1/.coveragerc b/owl-bot-staging/spanner_admin_database/v1/.coveragerc new file mode 100644 index 0000000000..6085d54da4 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/spanner_admin_database/__init__.py + google/cloud/spanner_admin_database/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/spanner_admin_database/v1/.flake8 b/owl-bot-staging/spanner_admin_database/v1/.flake8 new file mode 100644 index 0000000000..90316de214 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/.flake8 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 +exclude = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint + **/gapic/** + **/services/** + **/types/** + # Exclude Protobuf gencode + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/spanner_admin_database/v1/LICENSE b/owl-bot-staging/spanner_admin_database/v1/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
new file mode 100644
index 0000000000..dae249ec89
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+include README.rst LICENSE
+recursive-include google *.py *.pyi *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/owl-bot-staging/spanner_admin_database/v1/README.rst b/owl-bot-staging/spanner_admin_database/v1/README.rst
new file mode 100644
index 0000000000..5cfc4ab969
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/README.rst
@@ -0,0 +1,143 @@
+Python Client for Google Cloud Spanner Admin Database API
+=========================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Spanner Admin Database API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
+
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+
+Examples
+^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard `logging` mechanism.
+
+
+Examples
+^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+    import logging
+
+    base_logger = logging.getLogger("google")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+    import logging
+
+    base_logger = logging.getLogger("google.cloud.library_v1")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
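+
+
+Example: propagating logged events to the root logger
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A minimal sketch of the propagation note in item 1 above. The root-logger handler and
+the :code:`DEBUG` level chosen here are illustrative choices, not requirements of this library:
+
+.. code-block:: python
+
+    import logging
+
+    # Give the root logger a handler; DEBUG is needed to observe the RPC
+    # events this library logs.
+    logging.basicConfig(level=logging.DEBUG)
+
+    # Explicitly opt the "google" logger back into propagation so that its
+    # events reach the root logger's handlers.
+    logging.getLogger("google").propagate = True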
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css
new file mode 100644
index 0000000000..b0a295464b
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html
new file mode 100644
index 0000000000..95e9c77fcf
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+             As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+             Library versions released prior to that date will continue to be available. For more information please
+             visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py new file mode 100644 index 0000000000..4b16cc5e97 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner-admin-database documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner-admin-database" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-database", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-admin-database-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-spanner-admin-database.tex", + u"google-cloud-spanner-admin-database Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (
+        root_doc,
+        "google-cloud-spanner-admin-database",
+        "google-cloud-spanner-admin-database Documentation",
+        [author],
+        1,
+    )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+    (
+        root_doc,
+        "google-cloud-spanner-admin-database",
+        "google-cloud-spanner-admin-database Documentation",
+        author,
+        "google-cloud-spanner-admin-database",
+        "google-cloud-spanner-admin-database Library",
+        "APIs",
+    )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst
new file mode 100644
index 0000000000..6f79e3f3d4
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst
@@ -0,0 +1,10 @@
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    spanner_admin_database_v1/services_
+    spanner_admin_database_v1/types_
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst
new file mode 100644
index 0000000000..536d17b2ea
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
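+
+A minimal sketch of that pattern for this library; the project, instance, and
+database identifiers below are placeholders:
+
+.. code-block:: python
+
+    import multiprocessing
+
+    from google.cloud import spanner_admin_database_v1
+
+    def fetch_database_name(name):
+        # Create the client inside the worker, i.e. after the fork.
+        client = spanner_admin_database_v1.DatabaseAdminClient()
+        return client.get_database(name=name).name
+
+    if __name__ == "__main__":
+        names = [
+            "projects/my-project/instances/my-instance/databases/my-database",
+        ]
+        with multiprocessing.Pool(processes=2) as pool:
+            print(pool.map(fetch_database_name, names))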
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst new file mode 100644 index 0000000000..bd6aab00e4 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst @@ -0,0 +1,10 @@ +DatabaseAdmin +------------------------------- + +.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst new file mode 100644 index 0000000000..55e57d8dc0 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner Admin Database v1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + database_admin diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst new file mode 100644 index 0000000000..fe6c27778b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Spanner Admin Database v1 API +==================================================== + +.. automodule:: google.cloud.spanner_admin_database_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py new file mode 100644 index 0000000000..aa445b5622 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_admin_database import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.spanner_admin_database_v1.services.database_admin.client import DatabaseAdminClient +from google.cloud.spanner_admin_database_v1.services.database_admin.async_client import DatabaseAdminAsyncClient + +from google.cloud.spanner_admin_database_v1.types.backup import Backup +from google.cloud.spanner_admin_database_v1.types.backup import BackupInfo +from google.cloud.spanner_admin_database_v1.types.backup import BackupInstancePartition +from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupMetadata +from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupMetadata +from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import DeleteBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import FullBackupSpec +from google.cloud.spanner_admin_database_v1.types.backup import GetBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup import IncrementalBackupSpec +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsRequest +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsResponse +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsRequest +from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsResponse +from google.cloud.spanner_admin_database_v1.types.backup import UpdateBackupRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupSchedule +from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupScheduleSpec +from google.cloud.spanner_admin_database_v1.types.backup_schedule import CreateBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import CrontabSpec +from google.cloud.spanner_admin_database_v1.types.backup_schedule import DeleteBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import GetBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesRequest +from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesResponse +from google.cloud.spanner_admin_database_v1.types.backup_schedule import UpdateBackupScheduleRequest +from google.cloud.spanner_admin_database_v1.types.common import EncryptionConfig +from google.cloud.spanner_admin_database_v1.types.common import EncryptionInfo +from google.cloud.spanner_admin_database_v1.types.common import OperationProgress +from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseRequest +from 
google.cloud.spanner_admin_database_v1.types.spanner_database_admin import Database +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DatabaseRole +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DdlStatementActionInfo +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DropDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesResponse +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import OptimizeRestoredDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseEncryptionConfig +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreInfo +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import SplitPoints +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseMetadata +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseRequest +from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreSourceType + +__all__ = ('DatabaseAdminClient', + 'DatabaseAdminAsyncClient', + 'Backup', + 'BackupInfo', + 'BackupInstancePartition', + 'CopyBackupEncryptionConfig', + 'CopyBackupMetadata', + 'CopyBackupRequest', + 'CreateBackupEncryptionConfig', + 'CreateBackupMetadata', + 'CreateBackupRequest', + 'DeleteBackupRequest', + 'FullBackupSpec', + 'GetBackupRequest', + 'IncrementalBackupSpec', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'UpdateBackupRequest', + 'BackupSchedule', + 'BackupScheduleSpec', + 'CreateBackupScheduleRequest', + 'CrontabSpec', + 'DeleteBackupScheduleRequest', + 'GetBackupScheduleRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'UpdateBackupScheduleRequest', + 'EncryptionConfig', + 'EncryptionInfo', + 'OperationProgress', + 
'DatabaseDialect', + 'AddSplitPointsRequest', + 'AddSplitPointsResponse', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'Database', + 'DatabaseRole', + 'DdlStatementActionInfo', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'GetDatabaseRequest', + 'InternalUpdateGraphOperationRequest', + 'InternalUpdateGraphOperationResponse', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'ListDatabaseRolesRequest', + 'ListDatabaseRolesResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'OptimizeRestoredDatabaseMetadata', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'RestoreDatabaseRequest', + 'RestoreInfo', + 'SplitPoints', + 'UpdateDatabaseDdlMetadata', + 'UpdateDatabaseDdlRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseRequest', + 'RestoreSourceType', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed new file mode 100644 index 0000000000..29f334aad6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py new file mode 100644 index 0000000000..6605eff74d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_admin_database_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.database_admin import DatabaseAdminClient +from .services.database_admin import DatabaseAdminAsyncClient + +from .types.backup import Backup +from .types.backup import BackupInfo +from .types.backup import BackupInstancePartition +from .types.backup import CopyBackupEncryptionConfig +from .types.backup import CopyBackupMetadata +from .types.backup import CopyBackupRequest +from .types.backup import CreateBackupEncryptionConfig +from .types.backup import CreateBackupMetadata +from .types.backup import CreateBackupRequest +from .types.backup import DeleteBackupRequest +from .types.backup import FullBackupSpec +from .types.backup import GetBackupRequest +from .types.backup import IncrementalBackupSpec +from .types.backup import ListBackupOperationsRequest +from .types.backup import ListBackupOperationsResponse +from .types.backup import ListBackupsRequest +from .types.backup import ListBackupsResponse +from .types.backup import UpdateBackupRequest +from .types.backup_schedule import BackupSchedule +from .types.backup_schedule import BackupScheduleSpec +from .types.backup_schedule import CreateBackupScheduleRequest +from .types.backup_schedule import CrontabSpec +from .types.backup_schedule import DeleteBackupScheduleRequest +from .types.backup_schedule import GetBackupScheduleRequest +from .types.backup_schedule import ListBackupSchedulesRequest +from .types.backup_schedule import ListBackupSchedulesResponse +from .types.backup_schedule import UpdateBackupScheduleRequest +from .types.common import EncryptionConfig +from .types.common import EncryptionInfo +from .types.common import OperationProgress +from .types.common import DatabaseDialect +from .types.spanner_database_admin import AddSplitPointsRequest +from .types.spanner_database_admin import AddSplitPointsResponse +from .types.spanner_database_admin import CreateDatabaseMetadata +from .types.spanner_database_admin import CreateDatabaseRequest +from .types.spanner_database_admin import Database +from .types.spanner_database_admin import DatabaseRole +from .types.spanner_database_admin import DdlStatementActionInfo +from .types.spanner_database_admin import DropDatabaseRequest +from .types.spanner_database_admin import GetDatabaseDdlRequest +from .types.spanner_database_admin import GetDatabaseDdlResponse +from .types.spanner_database_admin import GetDatabaseRequest +from .types.spanner_database_admin import InternalUpdateGraphOperationRequest +from .types.spanner_database_admin import InternalUpdateGraphOperationResponse +from .types.spanner_database_admin import ListDatabaseOperationsRequest +from .types.spanner_database_admin import ListDatabaseOperationsResponse +from .types.spanner_database_admin import ListDatabaseRolesRequest +from .types.spanner_database_admin import ListDatabaseRolesResponse +from .types.spanner_database_admin import ListDatabasesRequest +from .types.spanner_database_admin import ListDatabasesResponse +from .types.spanner_database_admin import OptimizeRestoredDatabaseMetadata +from .types.spanner_database_admin import RestoreDatabaseEncryptionConfig +from .types.spanner_database_admin import RestoreDatabaseMetadata +from .types.spanner_database_admin import RestoreDatabaseRequest +from .types.spanner_database_admin import RestoreInfo +from .types.spanner_database_admin import SplitPoints +from .types.spanner_database_admin import UpdateDatabaseDdlMetadata +from 
.types.spanner_database_admin import UpdateDatabaseDdlRequest +from .types.spanner_database_admin import UpdateDatabaseMetadata +from .types.spanner_database_admin import UpdateDatabaseRequest +from .types.spanner_database_admin import RestoreSourceType + +__all__ = ( + 'DatabaseAdminAsyncClient', +'AddSplitPointsRequest', +'AddSplitPointsResponse', +'Backup', +'BackupInfo', +'BackupInstancePartition', +'BackupSchedule', +'BackupScheduleSpec', +'CopyBackupEncryptionConfig', +'CopyBackupMetadata', +'CopyBackupRequest', +'CreateBackupEncryptionConfig', +'CreateBackupMetadata', +'CreateBackupRequest', +'CreateBackupScheduleRequest', +'CreateDatabaseMetadata', +'CreateDatabaseRequest', +'CrontabSpec', +'Database', +'DatabaseAdminClient', +'DatabaseDialect', +'DatabaseRole', +'DdlStatementActionInfo', +'DeleteBackupRequest', +'DeleteBackupScheduleRequest', +'DropDatabaseRequest', +'EncryptionConfig', +'EncryptionInfo', +'FullBackupSpec', +'GetBackupRequest', +'GetBackupScheduleRequest', +'GetDatabaseDdlRequest', +'GetDatabaseDdlResponse', +'GetDatabaseRequest', +'IncrementalBackupSpec', +'InternalUpdateGraphOperationRequest', +'InternalUpdateGraphOperationResponse', +'ListBackupOperationsRequest', +'ListBackupOperationsResponse', +'ListBackupSchedulesRequest', +'ListBackupSchedulesResponse', +'ListBackupsRequest', +'ListBackupsResponse', +'ListDatabaseOperationsRequest', +'ListDatabaseOperationsResponse', +'ListDatabaseRolesRequest', +'ListDatabaseRolesResponse', +'ListDatabasesRequest', +'ListDatabasesResponse', +'OperationProgress', +'OptimizeRestoredDatabaseMetadata', +'RestoreDatabaseEncryptionConfig', +'RestoreDatabaseMetadata', +'RestoreDatabaseRequest', +'RestoreInfo', +'RestoreSourceType', +'SplitPoints', +'UpdateBackupRequest', +'UpdateBackupScheduleRequest', +'UpdateDatabaseDdlMetadata', +'UpdateDatabaseDdlRequest', +'UpdateDatabaseMetadata', +'UpdateDatabaseRequest', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json new file mode 100644 index 0000000000..027a4f612b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json @@ -0,0 +1,433 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.spanner_admin_database_v1", + "protoPackage": "google.spanner.admin.database.v1", + "schema": "1.0", + "services": { + "DatabaseAdmin": { + "clients": { + "grpc": { + "libraryClient": "DatabaseAdminClient", + "rpcs": { + "AddSplitPoints": { + "methods": [ + "add_split_points" + ] + }, + "CopyBackup": { + "methods": [ + "copy_backup" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DropDatabase": { + "methods": [ + "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + 
"InternalUpdateGraphOperation": { + "methods": [ + "internal_update_graph_operation" + ] + }, + "ListBackupOperations": { + "methods": [ + "list_backup_operations" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabaseRoles": { + "methods": [ + "list_database_roles" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DatabaseAdminAsyncClient", + "rpcs": { + "AddSplitPoints": { + "methods": [ + "add_split_points" + ] + }, + "CopyBackup": { + "methods": [ + "copy_backup" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DropDatabase": { + "methods": [ + "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "InternalUpdateGraphOperation": { + "methods": [ + "internal_update_graph_operation" + ] + }, + "ListBackupOperations": { + "methods": [ + "list_backup_operations" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabaseRoles": { + "methods": [ + "list_database_roles" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + }, + "rest": { + "libraryClient": "DatabaseAdminClient", + "rpcs": { + "AddSplitPoints": { + "methods": [ + "add_split_points" + ] + }, + "CopyBackup": { + "methods": [ + "copy_backup" + ] + }, + "CreateBackup": { + "methods": [ + "create_backup" + ] + }, + "CreateBackupSchedule": { + "methods": [ + "create_backup_schedule" + ] + }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, + "DeleteBackup": { + "methods": [ + "delete_backup" + ] + }, + "DeleteBackupSchedule": { + "methods": [ + "delete_backup_schedule" + ] + }, + "DropDatabase": { + "methods": [ 
+ "drop_database" + ] + }, + "GetBackup": { + "methods": [ + "get_backup" + ] + }, + "GetBackupSchedule": { + "methods": [ + "get_backup_schedule" + ] + }, + "GetDatabase": { + "methods": [ + "get_database" + ] + }, + "GetDatabaseDdl": { + "methods": [ + "get_database_ddl" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "InternalUpdateGraphOperation": { + "methods": [ + "internal_update_graph_operation" + ] + }, + "ListBackupOperations": { + "methods": [ + "list_backup_operations" + ] + }, + "ListBackupSchedules": { + "methods": [ + "list_backup_schedules" + ] + }, + "ListBackups": { + "methods": [ + "list_backups" + ] + }, + "ListDatabaseOperations": { + "methods": [ + "list_database_operations" + ] + }, + "ListDatabaseRoles": { + "methods": [ + "list_database_roles" + ] + }, + "ListDatabases": { + "methods": [ + "list_databases" + ] + }, + "RestoreDatabase": { + "methods": [ + "restore_database" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateBackup": { + "methods": [ + "update_backup" + ] + }, + "UpdateBackupSchedule": { + "methods": [ + "update_backup_schedule" + ] + }, + "UpdateDatabase": { + "methods": [ + "update_database" + ] + }, + "UpdateDatabaseDdl": { + "methods": [ + "update_database_ddl" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed new file mode 100644 index 0000000000..29f334aad6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py new file mode 100644 index 0000000000..cbf94b283c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py new file mode 100644 index 0000000000..85e225bbd8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DatabaseAdminClient +from .async_client import DatabaseAdminAsyncClient + +__all__ = ( + 'DatabaseAdminClient', + 'DatabaseAdminAsyncClient', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py new file mode 100644 index 0000000000..b5e733e3a6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py @@ -0,0 +1,3968 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
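+# Usage sketch (illustrative only, not part of the generated surface): it assumes +# application default credentials, and "my-project"/"my-instance" below are +# placeholder names. +# +#     import asyncio +#     from google.cloud import spanner_admin_database_v1 +# +#     async def main(): +#         client = spanner_admin_database_v1.DatabaseAdminAsyncClient() +#         pager = await client.list_databases(parent="projects/my-project/instances/my-instance") +#         async for database in pager: +#             print(database.name) +# +#     asyncio.run(main())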
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +import uuid + +from google.cloud.spanner_admin_database_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_database_v1.services.database_admin import pagers +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import common +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport +from .client import DatabaseAdminClient + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class DatabaseAdminAsyncClient: + """Cloud Spanner Database Admin API + + The Cloud Spanner Database Admin API can be used to: + + - create, drop, and list databases + - update the schema of pre-existing databases + - create, delete, copy, and list backups for a database + - restore a database from an existing backup + """ + + _client: DatabaseAdminClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = DatabaseAdminClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DatabaseAdminClient._DEFAULT_UNIVERSE + + backup_path = staticmethod(DatabaseAdminClient.backup_path) + parse_backup_path = staticmethod(DatabaseAdminClient.parse_backup_path) + backup_schedule_path = staticmethod(DatabaseAdminClient.backup_schedule_path) + parse_backup_schedule_path = staticmethod(DatabaseAdminClient.parse_backup_schedule_path) + crypto_key_path = staticmethod(DatabaseAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(DatabaseAdminClient.parse_crypto_key_path) + crypto_key_version_path = staticmethod(DatabaseAdminClient.crypto_key_version_path) + parse_crypto_key_version_path = staticmethod(DatabaseAdminClient.parse_crypto_key_version_path) + database_path = staticmethod(DatabaseAdminClient.database_path) + parse_database_path = staticmethod(DatabaseAdminClient.parse_database_path) + database_role_path = staticmethod(DatabaseAdminClient.database_role_path) + parse_database_role_path = staticmethod(DatabaseAdminClient.parse_database_role_path) + instance_path = staticmethod(DatabaseAdminClient.instance_path) + parse_instance_path = staticmethod(DatabaseAdminClient.parse_instance_path) + instance_partition_path = staticmethod(DatabaseAdminClient.instance_partition_path) + parse_instance_partition_path = staticmethod(DatabaseAdminClient.parse_instance_partition_path) + common_billing_account_path = staticmethod(DatabaseAdminClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DatabaseAdminClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DatabaseAdminClient.common_folder_path) + parse_common_folder_path = staticmethod(DatabaseAdminClient.parse_common_folder_path) + common_organization_path = staticmethod(DatabaseAdminClient.common_organization_path) + parse_common_organization_path = staticmethod(DatabaseAdminClient.parse_common_organization_path) + common_project_path = staticmethod(DatabaseAdminClient.common_project_path) + parse_common_project_path = staticmethod(DatabaseAdminClient.parse_common_project_path) + common_location_path = staticmethod(DatabaseAdminClient.common_location_path) + parse_common_location_path = staticmethod(DatabaseAdminClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. + """ + return DatabaseAdminClient.from_service_account_info.__func__(DatabaseAdminAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DatabaseAdminAsyncClient: The constructed client. 
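+ + Example (illustrative; ``sa.json`` is a hypothetical key-file path):: + + client = DatabaseAdminAsyncClient.from_service_account_file("sa.json")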
+ """ + return DatabaseAdminClient.from_service_account_file.__func__(DatabaseAdminAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DatabaseAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DatabaseAdminTransport: + """Returns the transport used by the client instance. + + Returns: + DatabaseAdminTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = DatabaseAdminClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the database admin async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatabaseAdminTransport constructor. + If set to None, a transport is chosen automatically. 
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence, and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DatabaseAdminClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner.admin.database_v1.DatabaseAdminAsyncClient`.", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "credentialsType": None, + } + ) + + async def list_databases(self, + request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabasesAsyncPager: + r"""Lists Cloud Spanner databases. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_databases(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_databases(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]]): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + parent (:class:`str`): + Required. The instance whose databases should be listed. + Values are of the form + ``projects/<project>/instances/<instance>``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager: + The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabasesRequest): + request = spanner_database_admin.ListDatabasesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_databases] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method.
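+ # Pages are fetched lazily: iterating the pager issues further + # ListDatabases calls as earlier pages are exhausted.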
+ response = pagers.ListDatabasesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_database(self, + request: Optional[Union[spanner_database_admin.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + create_statement: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Cloud Spanner database and starts to prepare it + for serving. The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``<database_name>/operations/<operation_id>`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_create_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateDatabaseRequest( + parent="parent_value", + create_statement="create_statement_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]]): + The request object. The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + parent (:class:`str`): + Required. The name of the instance that will serve the + new database. Values are of the form + ``projects/<project>/instances/<instance>``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + create_statement (:class:`str`): + Required. A ``CREATE DATABASE`` statement, which + specifies the ID of the new database. The database ID + must conform to the regular expression + ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30 + characters in length. If the database ID is a reserved + word or if it contains a hyphen, the database ID must be + enclosed in backticks (:literal:`\``). + + This corresponds to the ``create_statement`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata.
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, create_statement] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): + request = spanner_database_admin.CreateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if create_statement is not None: + request.create_statement = create_statement + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.CreateDatabaseMetadata, + ) + + # Done; return the response. + return response + + async def get_database(self, + request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.Database: + r"""Gets the state of a Cloud Spanner database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]]): + The request object. The request for + [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. + name (:class:`str`): + Required. The name of the requested database. Values are + of the form + ``projects/<project>/instances/<instance>/databases/<database>``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Database: + A Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.GetDatabaseRequest): + request = spanner_database_admin.GetDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
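+ # The returned Database's ``state`` field reports whether it is still + # CREATING, READY, or (after a restore) READY_OPTIMIZING.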
+ return response + + async def update_database(self, + request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[spanner_database_admin.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a Cloud Spanner database. The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of updating the database. If the named database + does not exist, returns ``NOT_FOUND``. + + While the operation is pending: + + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field is set to true. + - Cancelling the operation is best-effort. If the cancellation + succeeds, the operation metadata's + [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] + is set, the updates are reverted, and the operation terminates + with a ``CANCELLED`` status. + - New UpdateDatabase requests will return a + ``FAILED_PRECONDITION`` error until the pending operation is + done (returns successfully or with error). + - Reading the database via the API continues to give the + pre-request values. + + Upon completion of the returned operation: + + - The new values are in effect and readable via the API. + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field becomes false. + + The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format + ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>`` + and can be used to track the database modification. The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + database = spanner_admin_database_v1.Database() + database.name = "name_value" + + request = spanner_admin_database_v1.UpdateDatabaseRequest( + database=database, + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]]): + The request object. The request for + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + database (:class:`google.cloud.spanner_admin_database_v1.types.Database`): + Required. The database to update.
The ``name`` field of + the database is of the form + ``projects/<project>/instances/<instance>/databases/<database>``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, only + the ``enable_drop_protection`` field can be updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest): + request = spanner_database_admin.UpdateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database.name", request.database.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.UpdateDatabaseMetadata, + ) + + # Done; return the response.
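+ # Callers typically block until the update finishes via + # ``database = await response.result()``.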
+ return response + + async def update_database_ddl(self, + request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None, + *, + database: Optional[str] = None, + statements: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``<database_name>/operations/<operation_id>`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database="database_value", + statements=['statements_value1', 'statements_value2'], + ) + + # Make the request + operation = client.update_database_ddl(request=request) + + print("Waiting for operation to complete...") + + response = await (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]]): + The request object. Enqueues the given DDL statements to be applied, in + order but not necessarily all at once, to the database + schema at some point (or points) in the future. The + server checks that the statements are executable + (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + ``NULL`` value in a column to which ``NOT NULL`` would + be added). If a statement fails, all subsequent + statements in the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + database (:class:`str`): + Required. The database to update. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + statements (:class:`MutableSequence[str]`): + Required. DDL statements to be + applied to the database. + + This corresponds to the ``statements`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, statements] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): + request = spanner_database_admin.UpdateDatabaseDdlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if statements: + request.statements.extend(statements) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, + ) + + # Done; return the response. + return response + + async def drop_database(self, + request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. Note: Cloud Spanner might continue to accept + requests for a few seconds after the database has been deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + await client.drop_database(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]]): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + database (:class:`str`): + Required. The database to be dropped. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.DropDatabaseRequest): + request = spanner_database_admin.DropDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.drop_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_database_ddl(self, + request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + r"""Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. 
This method does not show pending + schema updates; those may be queried using the + [Operations][google.longrunning.Operations] API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = await client.get_database_ddl(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]]): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + database (:class:`str`): + Required. The database whose schema we wish to get. + Values are of the form + ``projects/<project>/instances/<instance>/databases/<database>`` + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: + The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): + request = spanner_database_admin.GetDatabaseDdlRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request.
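+ # The RPC returns a GetDatabaseDdlResponse whose ``statements`` field + # carries the current schema as formatted DDL statements.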
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on a database or backup resource. + Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + +
.. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
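+
+                For example, the returned policy's bindings can be inspected
+                directly (an illustrative sketch; field names follow
+                ``google.iam.v1.policy_pb2``):
+
+                .. code-block:: python
+
+                    policy = await client.get_iam_policy(request=request)
+                    for binding in policy.bindings:
+                        print(binding.role, list(binding.members))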
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + resource: Optional[str] = None, + permissions: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns permissions that the caller has on the specified + database or backup resource. + + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. + Calling this method on a backup that does not exist will result + in a NOT_FOUND error if the user has ``spanner.backups.list`` + permission on the containing instance. + +
.. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy detail is being requested. See + the operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permissions (:class:`MutableSequence[str]`): + The set of permissions to check for the ``resource``. + Permissions with wildcards (such as '*' or 'storage.*') + are not allowed. For more information see `IAM + Overview `__. + + This corresponds to the ``permissions`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource, permissions] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here.
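+        # The routing header assembled below is sent as the
+        # `x-goog-request-params` metadata entry, which lets the backend
+        # route the call based on the `resource` name.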
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup(self, + request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup: Optional[gsad_backup.Backup] = None, + backup_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts creating a new Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + creation and delete the backup. There can be only one pending + backup creation per database. Backup creation of different + databases can run concurrently. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_create_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]]): + The request object. The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + parent (:class:`str`): + Required. The name of the instance in which the backup + will be created. This must be the same instance that + contains the database the backup will be created from. + The backup will be stored in the location(s) specified + in the instance configuration of this instance. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): + Required. The backup to create. + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. The id of the backup to be created. 
The + ``backup_id`` appended to ``parent`` forms the full + backup name of the form + ``projects//instances//backups/``. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup, backup_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup.CreateBackupRequest): + request = gsad_backup.CreateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup is not None: + request.backup = backup + if backup_id is not None: + request.backup_id = backup_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gsad_backup.Backup, + metadata_type=gsad_backup.CreateBackupMetadata, + ) + + # Done; return the response. + return response + + async def copy_backup(self, + request: Optional[Union[backup.CopyBackupRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_id: Optional[str] = None, + source_backup: Optional[str] = None, + expire_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts copying a Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track copying of the backup. 
The operation is + associated with the destination backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + copying and delete the destination backup. Concurrent CopyBackup + requests can run on the same source backup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_copy_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + # Make the request + operation = client.copy_backup(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]]): + The request object. The request for + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. + parent (:class:`str`): + Required. The name of the destination instance that will + contain the backup copy. Values are of the form: + ``projects/<project>/instances/<instance>``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_id (:class:`str`): + Required. The id of the backup copy. The ``backup_id`` + appended to ``parent`` forms the full backup_uri of the + form + ``projects/<project>/instances/<instance>/backups/<backup_id>``. + + This corresponds to the ``backup_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + source_backup (:class:`str`): + Required. The source backup to be copied. The source + backup needs to be in READY state for it to be copied. + Once CopyBackup is in progress, the source backup cannot + be deleted or cleaned up on expiration until CopyBackup + is finished. Values are of the form: + ``projects/<project>/instances/<instance>/backups/<backup>``. + + This corresponds to the ``source_backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + expire_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Required. The expiration time of the backup in + microsecond granularity. The expiration time must be at + least 6 hours and at most 366 days from the + ``create_time`` of the source backup. Once the + ``expire_time`` has passed, the backup is eligible to be + automatically deleted by Cloud Spanner to free the + resources used by the backup. + + This corresponds to the ``expire_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_id, source_backup, expire_time] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.CopyBackupRequest): + request = backup.CopyBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_id is not None: + request.backup_id = backup_id + if source_backup is not None: + request.source_backup = source_backup + if expire_time is not None: + request.expire_time = expire_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.copy_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backup.Backup, + metadata_type=backup.CopyBackupMetadata, + ) + + # Done; return the response. + return response + + async def get_backup(self, + request: Optional[Union[backup.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backup.Backup: + r"""Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]]): + The request object. The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + name (:class:`str`): + Required. Name of the backup. Values are of the form + ``projects/<project>/instances/<instance>/backups/<backup>``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.GetBackupRequest): + request = backup.GetBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response.
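+        # Callers commonly check the state of the returned Backup message,
+        # for example (illustrative sketch):
+        #     backup_msg = await client.get_backup(request=request)
+        #     if backup_msg.state == backup.Backup.State.READY:
+        #         ...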
+ return response + + async def update_backup(self, + request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None, + *, + backup: Optional[gsad_backup.Backup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup.Backup: + r"""Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = await client.update_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]]): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only + supported for the following fields: + + - ``backup.expire_time``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be + updated. This mask is relative to the Backup resource, + not to the request message. The field mask must always + be specified; this prevents any future fields from being + erased accidentally by clients that do not know about + them. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
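+        # This guard makes `request` and the flattened arguments mutually
+        # exclusive; for example, a (hypothetical) call mixing both raises:
+        #     await client.update_backup(request=req, update_mask=mask)
+        #     # -> ValueError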
+ flattened_params = [backup, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup.UpdateBackupRequest): + request = gsad_backup.UpdateBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup is not None: + request.backup = backup + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup.name", request.backup.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup(self, + request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_delete_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]]): + The request object. The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + name (:class:`str`): + Required. Name of the backup to delete. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
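+
+                For example (illustrative only; the metadata key shown is
+                hypothetical), a ``-bin`` key must carry ``bytes``:
+
+                .. code-block:: python
+
+                    await client.delete_backup(
+                        request=request,
+                        metadata=(("debug-trace-bin", b"\x00\x01"),),
+                    )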
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.DeleteBackupRequest): + request = backup.DeleteBackupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_backups(self, + request: Optional[Union[backup.ListBackupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupsAsyncPager: + r"""Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_backups(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]]): + The request object. The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + parent (:class:`str`): + Required. The instance to list backups from. Values are + of the form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager: + The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.ListBackupsRequest): + request = backup.ListBackupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_backups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def restore_database(self, + request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database_id: Optional[str] = None, + backup: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new database by restoring from a completed backup. The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. 
Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_restore_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.RestoreDatabaseRequest( + backup="backup_value", + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.restore_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]]): + The request object. The request for + [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + parent (:class:`str`): + Required. The name of the instance in which to create + the restored database. This instance must be in the same + project and have the same instance configuration as the + instance containing the source backup. Values are of the + form ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (:class:`str`): + Required. The id of the database to create and restore + to. This database must not already exist. The + ``database_id`` appended to ``parent`` forms the full + database name of the form + ``projects//instances//databases/``. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup (:class:`str`): + Name of the backup from which to restore. Values are of + the form + ``projects//instances//backups/``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Database` + A Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
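+        # The raw long-running Operation returned by the transport is wrapped
+        # below via `operation_async.from_gapic`; awaiting the wrapped
+        # operation's result yields the restored Database message.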
+ flattened_params = [parent, database_id, backup] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest): + request = spanner_database_admin.RestoreDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database_id is not None: + request.database_id = database_id + if backup is not None: + request.backup = backup + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.restore_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_database_admin.Database, + metadata_type=spanner_database_admin.RestoreDatabaseMetadata, + ) + + # Done; return the response. + return response + + async def list_database_operations(self, + request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabaseOperationsAsyncPager: + r"""Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_database_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]]): + The request object. The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + parent (:class:`str`): + Required. The instance of the database operations. + Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager: + The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): + request = spanner_database_admin.ListDatabaseOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
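+        # Only the first page is fetched here; the pager constructed below
+        # re-invokes `rpc` lazily during iteration, forwarding the same
+        # retry, timeout, and metadata to each subsequent page request.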
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatabaseOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_backup_operations(self, + request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupOperationsAsyncPager: + r"""Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_backup_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]]): + The request object. The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + parent (:class:`str`): + Required. The instance of the backup operations. Values + are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager: + The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.ListBackupOperationsRequest): + request = backup.ListBackupOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_database_roles(self, + request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabaseRolesAsyncPager: + r"""Lists Cloud Spanner database roles. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_database_roles(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseRolesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_roles(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]]): + The request object. The request for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + parent (:class:`str`): + Required. The database whose roles should be listed. 
+ Values are of the form + ``projects//instances//databases/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager: + The response for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest): + request = spanner_database_admin.ListDatabaseRolesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_roles] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatabaseRolesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def add_split_points(self, + request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None, + *, + database: Optional[str] = None, + split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.AddSplitPointsResponse: + r"""Adds split points to specified tables, indexes of a + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_add_split_points(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.AddSplitPointsRequest( + database="database_value", + ) + + # Make the request + response = await client.add_split_points(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]]): + The request object. The request for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + database (:class:`str`): + Required. The database on whose tables/indexes split + points are to be added. Values are of the form + ``projects//instances//databases/``. + + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + split_points (:class:`MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]`): + Required. The split points to add. + This corresponds to the ``split_points`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: + The response for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, split_points] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): + request = spanner_database_admin.AddSplitPointsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if split_points: + request.split_points.extend(split_points) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.add_split_points] + + # Certain fields should be provided within the metadata header; + # add these here. 
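+        # Editorial note (not generator output): the routing header below is
+        # transmitted as the "x-goog-request-params" gRPC metadata entry with a
+        # percent-encoded value; e.g. for a hypothetical database
+        # "projects/p/instances/i/databases/d" the entry becomes
+        # ("x-goog-request-params",
+        #  "database=projects%2Fp%2Finstances%2Fi%2Fdatabases%2Fd").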
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + backup_schedule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Creates a new backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = await client.create_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]]): + The request object. The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + parent (:class:`str`): + Required. The name of the database + that this backup schedule applies to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): + Required. The backup schedule to + create. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_schedule_id (:class:`str`): + Required. The Id to use for the backup schedule. The + ``backup_schedule_id`` appended to ``parent`` forms the + full backup schedule name of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``backup_schedule_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
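+
+        A flattened-argument call is also supported (editorial sketch; the
+        resource names are hypothetical):
+
+        .. code-block:: python
+
+            schedule = await client.create_backup_schedule(
+                parent="projects/my-project/instances/my-instance/databases/my-db",
+                backup_schedule=spanner_admin_database_v1.BackupSchedule(),
+                backup_schedule_id="my-schedule",
+            )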
+ + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_schedule, backup_schedule_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest): + request = gsad_backup_schedule.CreateBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if backup_schedule_id is not None: + request.backup_schedule_id = backup_schedule_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_backup_schedule(self, + request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backup_schedule.BackupSchedule: + r"""Gets backup schedule for the input schedule name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]]): + The request object. The request for + [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. + name (:class:`str`): + Required. The name of the schedule to retrieve. 
Values + are of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.GetBackupScheduleRequest): + request = backup_schedule.GetBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, + *, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]]): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated + as specified by ``update_mask`` are required. Other + fields are ignored. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which + fields in the BackupSchedule resource + should be updated. This mask is relative + to the BackupSchedule resource, not to + the request message. The field mask must + always be specified; this prevents any + future fields from being erased + accidentally. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup_schedule, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): + request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
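+        # Editorial note (not generator output): `_wrapped_methods` maps each raw
+        # transport callable to a version wrapped by
+        # `gapic_v1.method_async.wrap_method`, which is where the per-method
+        # default retry and timeout policies from the transport base are attached.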
+ rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup_schedule.name", request.backup_schedule.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_backup_schedule(self, + request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup_schedule(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]]): + The request object. The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + name (:class:`str`): + Required. The name of the schedule to delete. Values are + of the form + ``projects//instances//databases//backupSchedules/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
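+        # Editorial note (not generator output): proto-plus request types accept
+        # either a dict or a message of the same type, so both
+        # `DeleteBackupScheduleRequest(name=...)` and a plain `{"name": ...}`
+        # mapping (hypothetical values) are coerced here.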
+ if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest): + request = backup_schedule.DeleteBackupScheduleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_backup_schedules(self, + request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupSchedulesAsyncPager: + r"""Lists all the backup schedules for the database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_list_backup_schedules(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_schedules(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]]): + The request object. The request for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + parent (:class:`str`): + Required. Database is the parent + resource whose backup schedules should + be listed. Values are of the form + projects//instances//databases/ + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager: + The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.ListBackupSchedulesRequest): + request = backup_schedule.ListBackupSchedulesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_schedules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupSchedulesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def internal_update_graph_operation(self, + request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, + *, + database: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: + r"""This is an internal API called by Spanner Graph jobs. + You should never need to call this API directly. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + async def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = await client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]]): + The request object. 
Internal request proto, do not use + directly. + database (:class:`str`): + Internal field, do not use directly. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (:class:`str`): + Internal field, do not use directly. + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: + Internal response proto, do not use + directly. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, operation_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): + request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.internal_update_graph_operation] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
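+
+        Example (editorial sketch; the operations collection name is
+        hypothetical):
+
+        .. code-block:: python
+
+            from google.longrunning import operations_pb2
+
+            response = await client.list_operations(
+                operations_pb2.ListOperationsRequest(
+                    name="projects/my-project/instances/my-instance/databases/my-db/operations",
+                )
+            )
+            for operation in response.operations:
+                print(operation.name)
+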
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
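+        # Editorial note (not generator output): a successful CancelOperation
+        # call returns google.protobuf.Empty, so there is no value to return
+        # here; cancellation itself is best-effort and should be confirmed via
+        # get_operation.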
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "DatabaseAdminAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "DatabaseAdminAsyncClient", +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py new file mode 100644 index 0000000000..38e3050e48 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py @@ -0,0 +1,4387 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import uuid +import warnings + +from google.cloud.spanner_admin_database_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_database_v1.services.database_admin import pagers +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import common +from 
google.cloud.spanner_admin_database_v1.types import spanner_database_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import DatabaseAdminGrpcTransport
+from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport
+from .transports.rest import DatabaseAdminRestTransport
+
+
+class DatabaseAdminClientMeta(type):
+    """Metaclass for the DatabaseAdmin client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DatabaseAdminTransport]]
+    _transport_registry["grpc"] = DatabaseAdminGrpcTransport
+    _transport_registry["grpc_asyncio"] = DatabaseAdminGrpcAsyncIOTransport
+    _transport_registry["rest"] = DatabaseAdminRestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[DatabaseAdminTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DatabaseAdminClient(metaclass=DatabaseAdminClientMeta):
+    """Cloud Spanner Database Admin API
+
+    The Cloud Spanner Database Admin API can be used to:
+
+    - create, drop, and list databases
+    - update the schema of pre-existing databases
+    - create, delete, copy and list backups for a database
+    - restore a database from an existing backup
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
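+    # Editorial note (not generator output): with the default universe the
+    # template below resolves to "spanner.googleapis.com"; a client created
+    # with a hypothetical universe_domain="example-universe.example.com" would
+    # instead target "spanner.example-universe.example.com".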
+    DEFAULT_ENDPOINT = "spanner.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DatabaseAdminClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DatabaseAdminClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DatabaseAdminTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DatabaseAdminTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def backup_path(project: str,instance: str,backup: str,) -> str:
+        """Returns a fully-qualified backup string."""
+        return "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, )
+
+    @staticmethod
+    def parse_backup_path(path: str) -> Dict[str,str]:
+        """Parses a backup path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/backups/(?P<backup>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def backup_schedule_path(project: str,instance: str,database: str,schedule: str,) -> str:
+        """Returns a fully-qualified backup_schedule string."""
+        return "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, )
+
+    @staticmethod
+    def parse_backup_schedule_path(path: str) -> Dict[str,str]:
+        """Parses a backup_schedule path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)/backupSchedules/(?P<schedule>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str:
+        """Returns a fully-qualified crypto_key string."""
+        return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, )
+
+    @staticmethod
+    def parse_crypto_key_path(path: str) -> Dict[str,str]:
+        """Parses a crypto_key path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: str,) -> str:
+
+    @staticmethod
+    def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: str,) -> str:
+        """Returns a fully-qualified crypto_key_version string."""
+        return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, )
+
+    @staticmethod
+    def parse_crypto_key_version_path(path: str) -> Dict[str,str]:
+        """Parses a crypto_key_version path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)/cryptoKeyVersions/(?P<crypto_key_version>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def database_path(project: str,instance: str,database: str,) -> str:
+        """Returns a fully-qualified database string."""
+        return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, )
+
+    @staticmethod
+    def parse_database_path(path: str) -> Dict[str,str]:
+        """Parses a database path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def database_role_path(project: str,instance: str,database: str,role: str,) -> str:
+        """Returns a fully-qualified database_role string."""
+        return "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, )
+
+    @staticmethod
+    def parse_database_role_path(path: str) -> Dict[str,str]:
+        """Parses a database_role path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)/databaseRoles/(?P<role>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def instance_path(project: str,instance: str,) -> str:
+        """Returns a fully-qualified instance string."""
+        return "projects/{project}/instances/{instance}".format(project=project, instance=instance, )
+
+    @staticmethod
+    def parse_instance_path(path: str) -> Dict[str,str]:
+        """Parses an instance path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str:
+        """Returns a fully-qualified instance_partition string."""
+        return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, )
+
+    @staticmethod
+    def parse_instance_partition_path(path: str) -> Dict[str,str]:
+        """Parses an instance_partition path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/instancePartitions/(?P<instance_partition>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. 
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DatabaseAdminClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the database admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DatabaseAdminTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DatabaseAdminClient._read_environment_variables() + self._client_cert_source = DatabaseAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = DatabaseAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DatabaseAdminTransport) + if transport_provided: + # transport is a DatabaseAdminTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DatabaseAdminTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + DatabaseAdminClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[DatabaseAdminTransport], Callable[..., DatabaseAdminTransport]] = ( + DatabaseAdminClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DatabaseAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner.admin.database_v1.DatabaseAdminClient`.", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "credentialsType": None, + } + ) + + def list_databases(self, + request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDatabasesPager: + r"""Lists Cloud Spanner databases. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_list_databases(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. 
+            parent (str):
+                Required. The instance whose databases should be listed.
+                Values are of the form
+                ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager:
+                The response for
+                [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_database_admin.ListDatabasesRequest):
+            request = spanner_database_admin.ListDatabasesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_databases]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDatabasesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_database(self,
+            request: Optional[Union[spanner_database_admin.CreateDatabaseRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            create_statement: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Creates a new Cloud Spanner database and starts to prepare it
+        for serving. The returned [long-running
+        operation][google.longrunning.Operation] will have a name of the
+        format ``<database_name>/operations/<operation_id>`` and can be
+        used to track preparation of the database.
+        The [metadata][google.longrunning.Operation.metadata] field type is
+        [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Database][google.spanner.admin.database.v1.Database], if
+        successful.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_create_database():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.CreateDatabaseRequest(
+                    parent="parent_value",
+                    create_statement="create_statement_value",
+                )
+
+                # Make the request
+                operation = client.create_database(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]):
+                The request object. The request for
+                [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase].
+            parent (str):
+                Required. The name of the instance that will serve the
+                new database. Values are of the form
+                ``projects/<project>/instances/<instance>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            create_statement (str):
+                Required. A ``CREATE DATABASE`` statement, which
+                specifies the ID of the new database. The database ID
+                must conform to the regular expression
+                ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30
+                characters in length. If the database ID is a reserved
+                word or if it contains a hyphen, the database ID must be
+                enclosed in backticks (:literal:`\``).
+
+                This corresponds to the ``create_statement`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.spanner_admin_database_v1.types.Database`
+                A Cloud Spanner database.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
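+        # The guard below enforces the GAPIC calling convention that `request`
+        # and the flattened fields (`parent`, `create_statement`) are mutually
+        # exclusive: pass one or the other, never both.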
+        flattened_params = [parent, create_statement]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_database_admin.CreateDatabaseRequest):
+            request = spanner_database_admin.CreateDatabaseRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if create_statement is not None:
+                request.create_statement = create_statement
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_database]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            spanner_database_admin.Database,
+            metadata_type=spanner_database_admin.CreateDatabaseMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_database(self,
+            request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> spanner_database_admin.Database:
+        r"""Gets the state of a Cloud Spanner database.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_get_database():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_database_v1.GetDatabaseRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_database(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]):
+                The request object. The request for
+                [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase].
+            name (str):
+                Required. The name of the requested database. Values are
+                of the form
+                ``projects/<project>/instances/<instance>/databases/<database>``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Database: + A Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.GetDatabaseRequest): + request = spanner_database_admin.GetDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_database(self, + request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None, + *, + database: Optional[spanner_database_admin.Database] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a Cloud Spanner database. The returned [long-running + operation][google.longrunning.Operation] can be used to track + the progress of updating the database. If the named database + does not exist, returns ``NOT_FOUND``. + + While the operation is pending: + + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field is set to true. + - Cancelling the operation is best-effort. If the cancellation + succeeds, the operation metadata's + [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] + is set, the updates are reverted, and the operation terminates + with a ``CANCELLED`` status. + - New UpdateDatabase requests will return a + ``FAILED_PRECONDITION`` error until the pending operation is + done (returns successfully or with error). + - Reading the database via the API continues to give the + pre-request values. + + Upon completion of the returned operation: + + - The new values are in effect and readable via the API. + - The database's + [reconciling][google.spanner.admin.database.v1.Database.reconciling] + field becomes false. 
+
+        The returned [long-running
+        operation][google.longrunning.Operation] will have a name of the
+        format
+        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``
+        and can be used to track the database modification. The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata].
+        The [response][google.longrunning.Operation.response] field type
+        is [Database][google.spanner.admin.database.v1.Database], if
+        successful.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_database_v1
+
+            def sample_update_database():
+                # Create a client
+                client = spanner_admin_database_v1.DatabaseAdminClient()
+
+                # Initialize request argument(s)
+                database = spanner_admin_database_v1.Database()
+                database.name = "name_value"
+
+                request = spanner_admin_database_v1.UpdateDatabaseRequest(
+                    database=database,
+                )
+
+                # Make the request
+                operation = client.update_database(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]):
+                The request object. The request for
+                [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase].
+            database (google.cloud.spanner_admin_database_v1.types.Database):
+                Required. The database to update. The ``name`` field of
+                the database is of the form
+                ``projects/<project>/instances/<instance>/databases/<database>``.
+
+                This corresponds to the ``database`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. The list of fields to update. Currently, only
+                the ``enable_drop_protection`` field can be updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be
+                :class:`google.cloud.spanner_admin_database_v1.types.Database`
+                A Cloud Spanner database.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
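+        # Illustrative flattened call (resource name assumed), equivalent to
+        # passing a full UpdateDatabaseRequest:
+        #
+        #   db = spanner_admin_database_v1.Database(
+        #       name="projects/p/instances/i/databases/d",
+        #       enable_drop_protection=True,
+        #   )
+        #   mask = field_mask_pb2.FieldMask(paths=["enable_drop_protection"])
+        #   operation = client.update_database(database=db, update_mask=mask)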
+        flattened_params = [database, update_mask]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest):
+            request = spanner_database_admin.UpdateDatabaseRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if database is not None:
+                request.database = database
+            if update_mask is not None:
+                request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_database]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("database.name", request.database.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Wrap the response in an operation future.
+        response = operation.from_gapic(
+            response,
+            self._transport.operations_client,
+            spanner_database_admin.Database,
+            metadata_type=spanner_database_admin.UpdateDatabaseMetadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_database_ddl(self,
+            request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None,
+            *,
+            database: Optional[str] = None,
+            statements: Optional[MutableSequence[str]] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Updates the schema of a Cloud Spanner database by
+        creating/altering/dropping tables, columns, indexes, etc. The
+        returned [long-running operation][google.longrunning.Operation]
+        will have a name of the format
+        ``<database_name>/operations/<operation_id>`` and can be used to
+        track execution of the schema change(s). The
+        [metadata][google.longrunning.Operation.metadata] field type is
+        [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata].
+        The operation has no response.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database="database_value", + statements=['statements_value1', 'statements_value2'], + ) + + # Make the request + operation = client.update_database_ddl(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): + The request object. Enqueues the given DDL statements to be applied, in + order but not necessarily all at once, to the database + schema at some point (or points) in the future. The + server checks that the statements are executable + (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + ``NULL`` value in a column to which ``NOT NULL`` would + be added). If a statement fails, all subsequent + statements in the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + database (str): + Required. The database to update. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + statements (MutableSequence[str]): + Required. DDL statements to be + applied to the database. + + This corresponds to the ``statements`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
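+        # Illustrative DDL batch (database name and statement assumed); the
+        # statements are applied in order as one schema-change operation:
+        #
+        #   operation = client.update_database_ddl(
+        #       database="projects/p/instances/i/databases/d",
+        #       statements=["ALTER TABLE Singers ADD COLUMN BirthDate DATE"],
+        #   )
+        #   operation.result()  # block until the schema change completes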
+ flattened_params = [database, statements] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): + request = spanner_database_admin.UpdateDatabaseDdlRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if statements is not None: + request.statements = statements + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_database_ddl] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, + ) + + # Done; return the response. + return response + + def drop_database(self, + request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. Note: Cloud Spanner might continue to accept + requests for a few seconds after the database has been deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + client.drop_database(request=request) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + database (str): + Required. The database to be dropped. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.DropDatabaseRequest): + request = spanner_database_admin.DropDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.drop_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_database_ddl(self, + request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, + *, + database: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + r"""Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = client.get_database_ddl(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + database (str): + Required. The database whose schema we wish to get. 
+                Values are of the form
+                ``projects/<project>/instances/<instance>/databases/<database>``
+
+                This corresponds to the ``database`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse:
+                The response for
+                [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl].
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [database]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest):
+            request = spanner_database_admin.GetDatabaseDdlRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if database is not None:
+                request.database = database
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_database_ddl]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("database", request.database),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def set_iam_policy(self,
+            request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None,
+            *,
+            resource: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> policy_pb2.Policy:
+        r"""Sets the access control policy on a database or backup resource.
+        Replaces any existing policy.
+
+        Authorization requires ``spanner.databases.setIamPolicy``
+        permission on
+        [resource][google.iam.v1.SetIamPolicyRequest.resource]. For
+        backups, authorization requires ``spanner.backups.setIamPolicy``
+        permission on
+        [resource][google.iam.v1.SetIamPolicyRequest.resource].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+ **JSON example:**
+
+ .. code-block:: json
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": [
+ "user:eve@example.com"
+ ],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+ }
+ }
+ ],
+ "etag": "BwWWja0YfJA=",
+ "version": 3
+ }
+
+ **YAML example:**
+
+ .. code-block:: yaml
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+ etag: BwWWja0YfJA=
+ version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ if isinstance(request, dict):
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+ elif not request:
+ # Null request, just make one.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ if resource is not None:
+ request.resource = resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_iam_policy(self,
+ request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the access control policy for a database or backup
+ resource. Returns an empty policy if a database or backup exists
+ but does not have a policy set.
+
+ Authorization requires ``spanner.databases.getIamPolicy``
+ permission on
+ [resource][google.iam.v1.GetIamPolicyRequest.resource]. For
+ backups, authorization requires ``spanner.backups.getIamPolicy``
+ permission on
+ [resource][google.iam.v1.GetIamPolicyRequest.resource].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+ **JSON example:**
+
+ .. code-block:: json
+
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": [
+ "user:eve@example.com"
+ ],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+ }
+ }
+ ],
+ "etag": "BwWWja0YfJA=",
+ "version": 3
+ }
+
+ **YAML example:**
+
+ .. code-block:: yaml
+
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+ etag: BwWWja0YfJA=
+ version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ if isinstance(request, dict):
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+ elif not request:
+ # Null request, just make one.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ if resource is not None:
+ request.resource = resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def test_iam_permissions(self,
+ request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ permissions: Optional[MutableSequence[str]] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Returns permissions that the caller has on the specified
+ database or backup resource.
+
+ Attempting this RPC on a non-existent Cloud Spanner database
+ will result in a NOT_FOUND error if the user has
+ ``spanner.databases.list`` permission on the containing Cloud
+ Spanner instance. Otherwise returns an empty set of permissions.
+ Calling this method on a backup that does not exist will result
+ in a NOT_FOUND error if the user has ``spanner.backups.list``
+ permission on the containing instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+ from google.iam.v1 import iam_policy_pb2 # type: ignore
+
+ def sample_test_iam_permissions():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = iam_policy_pb2.TestIamPermissionsRequest(
+ resource="resource_value",
+ permissions=['permissions_value1', 'permissions_value2'],
+ )
+
+ # Make the request
+ response = client.test_iam_permissions(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
+ The request object. Request message for ``TestIamPermissions`` method.
+ resource (str):
+ REQUIRED: The resource for which the
+ policy detail is being requested. See
+ the operation documentation for the
+ appropriate value for this field.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (MutableSequence[str]):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource, permissions]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ if isinstance(request, dict):
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+ elif not request:
+ # Null request, just make one.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+ if resource is not None:
+ request.resource = resource
+ if permissions:
+ request.permissions.extend(permissions)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
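+ # Each entry in `_wrapped_methods` was prepared when the transport was
+ # created, carrying the default retry and timeout policy for its RPC;
+ # the per-call `retry` and `timeout` arguments only override those
+ # defaults for this invocation.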
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def create_backup(self,
+ request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ backup: Optional[gsad_backup.Backup] = None,
+ backup_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Starts creating a new Cloud Spanner Backup. The returned backup
+ [long-running operation][google.longrunning.Operation] will have
+ a name of the format
+ ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+ and can be used to track creation of the backup. The
+ [metadata][google.longrunning.Operation.metadata] field type is
+ [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata].
+ The [response][google.longrunning.Operation.response] field type
+ is [Backup][google.spanner.admin.database.v1.Backup], if
+ successful. Cancelling the returned operation will stop the
+ creation and delete the backup. There can be only one pending
+ backup creation per database. Backup creation of different
+ databases can run concurrently.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_create_backup():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.CreateBackupRequest(
+ parent="parent_value",
+ backup_id="backup_id_value",
+ )
+
+ # Make the request
+ operation = client.create_backup(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]):
+ The request object. The request for
+ [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup].
+ parent (str):
+ Required. The name of the instance in which the backup
+ will be created. This must be the same instance that
+ contains the database the backup will be created from.
+ The backup will be stored in the location(s) specified
+ in the instance configuration of this instance. Values
+ are of the form
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup (google.cloud.spanner_admin_database_v1.types.Backup):
+ Required. The backup to create.
+ This corresponds to the ``backup`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_id (str):
+ Required. The id of the backup to be created. The
+ ``backup_id`` appended to ``parent`` forms the full
+ backup name of the form
+ ``projects/<project>/instances/<instance>/backups/<backup_id>``.
+
+ This corresponds to the ``backup_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.spanner_admin_database_v1.types.Backup`
+ A backup of a Cloud Spanner database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, backup, backup_id]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, gsad_backup.CreateBackupRequest):
+ request = gsad_backup.CreateBackupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if backup is not None:
+ request.backup = backup
+ if backup_id is not None:
+ request.backup_id = backup_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ gsad_backup.Backup,
+ metadata_type=gsad_backup.CreateBackupMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def copy_backup(self,
+ request: Optional[Union[backup.CopyBackupRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ backup_id: Optional[str] = None,
+ source_backup: Optional[str] = None,
+ expire_time: Optional[timestamp_pb2.Timestamp] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Starts copying a Cloud Spanner Backup.
The returned backup
+ [long-running operation][google.longrunning.Operation] will have
+ a name of the format
+ ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+ and can be used to track copying of the backup. The operation is
+ associated with the destination backup. The
+ [metadata][google.longrunning.Operation.metadata] field type is
+ [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata].
+ The [response][google.longrunning.Operation.response] field type
+ is [Backup][google.spanner.admin.database.v1.Backup], if
+ successful. Cancelling the returned operation will stop the
+ copying and delete the destination backup. Concurrent CopyBackup
+ requests can run on the same source backup.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_copy_backup():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.CopyBackupRequest(
+ parent="parent_value",
+ backup_id="backup_id_value",
+ source_backup="source_backup_value",
+ )
+
+ # Make the request
+ operation = client.copy_backup(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]):
+ The request object. The request for
+ [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup].
+ parent (str):
+ Required. The name of the destination instance that will
+ contain the backup copy. Values are of the form:
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_id (str):
+ Required. The id of the backup copy. The ``backup_id``
+ appended to ``parent`` forms the full backup_uri of the
+ form
+ ``projects/<project>/instances/<instance>/backups/<backup_id>``.
+
+ This corresponds to the ``backup_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ source_backup (str):
+ Required. The source backup to be copied. The source
+ backup needs to be in READY state for it to be copied.
+ Once CopyBackup is in progress, the source backup cannot
+ be deleted or cleaned up on expiration until CopyBackup
+ is finished. Values are of the form:
+ ``projects/<project>/instances/<instance>/backups/<source_backup>``.
+
+ This corresponds to the ``source_backup`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ expire_time (google.protobuf.timestamp_pb2.Timestamp):
+ Required. The expiration time of the backup in
+ microsecond granularity. The expiration time must be at
+ least 6 hours and at most 366 days from the
+ ``create_time`` of the source backup. Once the
+ ``expire_time`` has passed, the backup is eligible to be
+ automatically deleted by Cloud Spanner to free the
+ resources used by the backup.
+
+ This corresponds to the ``expire_time`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_database_v1.types.Backup` + A backup of a Cloud Spanner database. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_id, source_backup, expire_time] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup.CopyBackupRequest): + request = backup.CopyBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_id is not None: + request.backup_id = backup_id + if source_backup is not None: + request.source_backup = source_backup + if expire_time is not None: + request.expire_time = expire_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.copy_backup] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backup.Backup, + metadata_type=backup.CopyBackupMetadata, + ) + + # Done; return the response. + return response + + def get_backup(self, + request: Optional[Union[backup.GetBackupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backup.Backup: + r"""Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_get_backup():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.GetBackupRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_backup(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]):
+ The request object. The request for
+ [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup].
+ name (str):
+ Required. Name of the backup. Values are of the form
+ ``projects/<project>/instances/<instance>/backups/<backup>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.types.Backup:
+ A backup of a Cloud Spanner database.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, backup.GetBackupRequest):
+ request = backup.GetBackupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def update_backup(self,
+ request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None,
+ *,
+ backup: Optional[gsad_backup.Backup] = None,
+ update_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> gsad_backup.Backup:
+ r"""Updates a pending or completed
+ [Backup][google.spanner.admin.database.v1.Backup].
+
+ ..
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = client.update_backup(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only + supported for the following fields: + + - ``backup.expire_time``. + + This corresponds to the ``backup`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be + updated. This mask is relative to the Backup resource, + not to the request message. The field mask must always + be specified; this prevents any future fields from being + erased accidentally by clients that do not know about + them. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.Backup: + A backup of a Cloud Spanner database. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup.UpdateBackupRequest): + request = gsad_backup.UpdateBackupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
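+ # A typical flattened call targets the one mutable field; the values
+ # below are illustrative placeholders, not required names:
+ #   client.update_backup(
+ #       backup=Backup(name=backup_name, expire_time=new_expire_time),
+ #       update_mask=FieldMask(paths=["expire_time"]),
+ #   )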
+ if backup is not None:
+ request.backup = backup
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("backup.name", request.backup.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_backup(self,
+ request: Optional[Union[backup.DeleteBackupRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> None:
+ r"""Deletes a pending or completed
+ [Backup][google.spanner.admin.database.v1.Backup].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_delete_backup():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.DeleteBackupRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ client.delete_backup(request=request)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]):
+ The request object. The request for
+ [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup].
+ name (str):
+ Required. Name of the backup to delete. Values are of
+ the form
+ ``projects/<project>/instances/<instance>/backups/<backup>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
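+ # A plain dict is accepted too; proto-plus coerces a mapping such as
+ # {"name": "projects/<project>/instances/<instance>/backups/<backup>"}
+ # into a DeleteBackupRequest below.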
+ if not isinstance(request, backup.DeleteBackupRequest):
+ request = backup.DeleteBackupRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_backup]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ def list_backups(self,
+ request: Optional[Union[backup.ListBackupsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListBackupsPager:
+ r"""Lists completed and pending backups. Backups returned are
+ ordered by ``create_time`` in descending order, starting from
+ the most recent ``create_time``.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_list_backups():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.ListBackupsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_backups(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]):
+ The request object. The request for
+ [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups].
+ parent (str):
+ Required. The instance to list backups from. Values are
+ of the form ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager:
+ The response for
+ [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
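+ # Mixing the two calling styles, e.g. list_backups(request=req,
+ # parent=name), is rejected with the ValueError raised just below.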
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, backup.ListBackupsRequest):
+ request = backup.ListBackupsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_backups]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListBackupsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def restore_database(self,
+ request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ database_id: Optional[str] = None,
+ backup: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Create a new database by restoring from a completed backup. The
+ new database must be in the same project and in an instance with
+ the same instance configuration as the instance containing the
+ backup. The returned database [long-running
+ operation][google.longrunning.Operation] has a name of the
+ format
+ ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``,
+ and can be used to track the progress of the operation, and to
+ cancel it. The [metadata][google.longrunning.Operation.metadata]
+ field type is
+ [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata].
+ The [response][google.longrunning.Operation.response] type is
+ [Database][google.spanner.admin.database.v1.Database], if
+ successful. Cancelling the returned operation will stop the
+ restore and delete the database. There can be only one database
+ being restored into an instance at a time. Once the restore
+ operation completes, a new restore operation can be initiated,
+ without waiting for the optimize operation associated with the
+ first restore to complete.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_restore_database():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.RestoreDatabaseRequest(
+ backup="backup_value",
+ parent="parent_value",
+ database_id="database_id_value",
+ )
+
+ # Make the request
+ operation = client.restore_database(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]):
+ The request object. The request for
+ [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].
+ parent (str):
+ Required. The name of the instance in which to create
+ the restored database. This instance must be in the same
+ project and have the same instance configuration as the
+ instance containing the source backup. Values are of the
+ form ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ database_id (str):
+ Required. The id of the database to create and restore
+ to. This database must not already exist. The
+ ``database_id`` appended to ``parent`` forms the full
+ database name of the form
+ ``projects/<project>/instances/<instance>/databases/<database_id>``.
+
+ This corresponds to the ``database_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup (str):
+ Name of the backup from which to restore. Values are of
+ the form
+ ``projects/<project>/instances/<instance>/backups/<backup>``.
+
+ This corresponds to the ``backup`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.spanner_admin_database_v1.types.Database`
+ A Cloud Spanner database.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, database_id, backup]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest):
+ request = spanner_database_admin.RestoreDatabaseRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
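+ # Illustrative flattened call (all names are placeholders):
+ #   client.restore_database(
+ #       parent="projects/<project>/instances/<instance>",
+ #       database_id="<database_id>",
+ #       backup="projects/<project>/instances/<instance>/backups/<backup>",
+ #   )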
+ if parent is not None:
+ request.parent = parent
+ if database_id is not None:
+ request.database_id = database_id
+ if backup is not None:
+ request.backup = backup
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.restore_database]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ spanner_database_admin.Database,
+ metadata_type=spanner_database_admin.RestoreDatabaseMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_database_operations(self,
+ request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListDatabaseOperationsPager:
+ r"""Lists database
+ [longrunning-operations][google.longrunning.Operation]. A
+ database operation has a name of the form
+ ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``.
+ The long-running operation
+ [metadata][google.longrunning.Operation.metadata] field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_list_database_operations():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.ListDatabaseOperationsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_database_operations(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]):
+ The request object. The request for
+ [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations].
+ parent (str):
+ Required. The instance of the database operations.
+ Values are of the form
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata.
Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager:
+ The response for
+ [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest):
+ request = spanner_database_admin.ListDatabaseOperationsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_database_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListDatabaseOperationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_backup_operations(self,
+ request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListBackupOperationsPager:
+ r"""Lists the backup [long-running
+ operations][google.longrunning.Operation] in the given instance.
+ A backup operation has a name of the form
+ ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation>``.
+ The long-running operation
+ [metadata][google.longrunning.Operation.metadata] field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.progress.start_time`` in descending
+ order starting from the most recently started operation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_list_backup_operations():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.ListBackupOperationsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_backup_operations(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]):
+ The request object. The request for
+ [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
+ parent (str):
+ Required. The instance of the backup operations. Values
+ are of the form
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager:
+ The response for
+ [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, backup.ListBackupOperationsRequest):
+ request = backup.ListBackupOperationsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_backup_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
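+ # The pager keeps the wrapped RPC together with the retry, timeout and
+ # metadata used here, and re-invokes it with each `next_page_token` as
+ # iteration crosses page boundaries.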
+ response = pagers.ListBackupOperationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_database_roles(self,
+ request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListDatabaseRolesPager:
+ r"""Lists Cloud Spanner database roles.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_list_database_roles():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.ListDatabaseRolesRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_database_roles(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]):
+ The request object. The request for
+ [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles].
+ parent (str):
+ Required. The database whose roles should be listed.
+ Values are of the form
+ ``projects/<project>/instances/<instance>/databases/<database>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager:
+ The response for
+ [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
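+ # (A plain ``dict`` is accepted here as well: the proto-plus constructor
+ # below coerces it into a ``ListDatabaseRolesRequest``.)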
+ if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest):
+ request = spanner_database_admin.ListDatabaseRolesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_database_roles]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListDatabaseRolesPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def add_split_points(self,
+ request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None,
+ *,
+ database: Optional[str] = None,
+ split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> spanner_database_admin.AddSplitPointsResponse:
+ r"""Adds split points to specified tables and indexes of a
+ database.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_add_split_points():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.AddSplitPointsRequest(
+ database="database_value",
+ )
+
+ # Make the request
+ response = client.add_split_points(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]):
+ The request object. The request for
+ [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints].
+ database (str):
+ Required. The database on whose tables/indexes split
+ points are to be added. Values are of the form
+ ``projects/<project>/instances/<instance>/databases/<database>``.
+
+ This corresponds to the ``database`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]):
+ Required. The split points to add.
+ This corresponds to the ``split_points`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: + The response for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [database, split_points] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): + request = spanner_database_admin.AddSplitPointsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if split_points is not None: + request.split_points = split_points + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.add_split_points] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("database", request.database), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, + *, + parent: Optional[str] = None, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + backup_schedule_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Creates a new backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_create_backup_schedule():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.CreateBackupScheduleRequest(
+ parent="parent_value",
+ backup_schedule_id="backup_schedule_id_value",
+ )
+
+ # Make the request
+ response = client.create_backup_schedule(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]):
+ The request object. The request for
+ [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule].
+ parent (str):
+ Required. The name of the database
+ that this backup schedule applies to.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule):
+ Required. The backup schedule to
+ create.
+
+ This corresponds to the ``backup_schedule`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ backup_schedule_id (str):
+ Required. The ID to use for the backup schedule. The
+ ``backup_schedule_id`` appended to ``parent`` forms the
+ full backup schedule name of the form
+ ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
+
+ This corresponds to the ``backup_schedule_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.types.BackupSchedule:
+ BackupSchedule expresses the
+ automated backup creation specification
+ for a Spanner database. Next ID: 10
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, backup_schedule, backup_schedule_id]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest):
+ request = gsad_backup_schedule.CreateBackupScheduleRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
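+ # (Only arguments that are not ``None`` are copied onto the freshly
+ # created request, so unset flattened parameters leave their proto
+ # fields at the default value.)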
+ if parent is not None:
+ request.parent = parent
+ if backup_schedule is not None:
+ request.backup_schedule = backup_schedule
+ if backup_schedule_id is not None:
+ request.backup_schedule_id = backup_schedule_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_backup_schedule(self,
+ request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> backup_schedule.BackupSchedule:
+ r"""Gets the backup schedule for the input schedule name.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_get_backup_schedule():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.GetBackupScheduleRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_backup_schedule(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]):
+ The request object. The request for
+ [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule].
+ name (str):
+ Required. The name of the schedule to retrieve. Values
+ are of the form
+ ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.types.BackupSchedule:
+ BackupSchedule expresses the
+ automated backup creation specification
+ for a Spanner database. Next ID: 10
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
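+ # (This guard enforces the GAPIC convention that ``request`` and the
+ # flattened keyword arguments are mutually exclusive.)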
+ flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backup_schedule.GetBackupScheduleRequest): + request = backup_schedule.GetBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_backup_schedule(self, + request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, + *, + backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Updates a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]): + The request object. The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated + as specified by ``update_mask`` are required. Other + fields are ignored. + + This corresponds to the ``backup_schedule`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which + fields in the BackupSchedule resource + should be updated. This mask is relative + to the BackupSchedule resource, not to + the request message. 
The field mask must + always be specified; this prevents any + future fields from being erased + accidentally. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. Next ID: 10 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup_schedule, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): + request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_schedule is not None: + request.backup_schedule = backup_schedule + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("backup_schedule.name", request.backup_schedule.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_backup_schedule(self, + request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a backup schedule. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_delete_backup_schedule():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ client.delete_backup_schedule(request=request)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]):
+ The request object. The request for
+ [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule].
+ name (str):
+ Required. The name of the schedule to delete. Values are
+ of the form
+ ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest):
+ request = backup_schedule.DeleteBackupScheduleRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ def list_backup_schedules(self,
+ request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListBackupSchedulesPager:
+ r"""Lists all the backup schedules for the database.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_database_v1
+
+ def sample_list_backup_schedules():
+ # Create a client
+ client = spanner_admin_database_v1.DatabaseAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_database_v1.ListBackupSchedulesRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_backup_schedules(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]):
+ The request object. The request for
+ [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+ parent (str):
+ Required. Database is the parent
+ resource whose backup schedules should
+ be listed. Values are of the form
+ ``projects/<project>/instances/<instance>/databases/<database>``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager:
+ The response for
+ [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, backup_schedule.ListBackupSchedulesRequest):
+ request = backup_schedule.ListBackupSchedulesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
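+ # (Leaving ``retry`` and ``timeout`` at their ``gapic_v1.method.DEFAULT``
+ # sentinels makes the wrapped method fall back to the per-RPC defaults
+ # configured on the transport.)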
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupSchedulesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def internal_update_graph_operation(self, + request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, + *, + database: Optional[str] = None, + operation_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: + r"""This is an internal API called by Spanner Graph jobs. + You should never need to call this API directly. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_database_v1 + + def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]): + The request object. Internal request proto, do not use + directly. + database (str): + Internal field, do not use directly. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + operation_id (str): + Internal field, do not use directly. + This corresponds to the ``operation_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: + Internal response proto, do not use + directly. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [database, operation_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): + request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if database is not None: + request.database = database + if operation_id is not None: + request.operation_id = operation_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.internal_update_graph_operation] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DatabaseAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
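+ # (The ``_wrapped_methods`` mapping is built once when the transport is
+ # constructed, so this lookup does not re-wrap the method on every call.)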
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "DatabaseAdminClient", +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py new file mode 100644 index 0000000000..5a5b92e7d1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py @@ -0,0 +1,864 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.longrunning import operations_pb2 # type: ignore + + +class ListDatabasesPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabasesResponse], + request: spanner_database_admin.ListDatabasesRequest, + response: spanner_database_admin.ListDatabasesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabasesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_database_admin.Database]: + for page in self.pages: + yield from page.databases + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabasesAsyncPager: + """A pager for iterating through ``list_databases`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``databases`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabases`` requests and continue to iterate + through the ``databases`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabasesResponse]], + request: spanner_database_admin.ListDatabasesRequest, + response: spanner_database_admin.ListDatabasesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabasesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabasesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: + async def async_generator(): + async for page in self.pages: + for response in page.databases: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupsPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., backup.ListBackupsResponse], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupsAsyncPager: + """A pager for iterating through ``list_backups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackups`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup.ListBackupsResponse]], + request: backup.ListBackupsRequest, + response: backup.ListBackupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[backup.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseOperationsPager: + """A pager for iterating through ``list_database_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabaseOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabaseOperationsResponse], + request: spanner_database_admin.ListDatabaseOperationsRequest, + response: spanner_database_admin.ListDatabaseOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseOperationsAsyncPager: + """A pager for iterating through ``list_database_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabaseOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]], + request: spanner_database_admin.ListDatabaseOperationsRequest, + response: spanner_database_admin.ListDatabaseOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupOperationsPager: + """A pager for iterating through ``list_backup_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., backup.ListBackupOperationsResponse], + request: backup.ListBackupOperationsRequest, + response: backup.ListBackupOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupOperationsAsyncPager: + """A pager for iterating through ``list_backup_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup.ListBackupOperationsResponse]], + request: backup.ListBackupOperationsRequest, + response: backup.ListBackupOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup.ListBackupOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseRolesPager: + """A pager for iterating through ``list_database_roles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``database_roles`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatabaseRoles`` requests and continue to iterate + through the ``database_roles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_database_admin.ListDatabaseRolesResponse], + request: spanner_database_admin.ListDatabaseRolesRequest, + response: spanner_database_admin.ListDatabaseRolesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseRolesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_database_admin.ListDatabaseRolesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_database_admin.DatabaseRole]: + for page in self.pages: + yield from page.database_roles + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDatabaseRolesAsyncPager: + """A pager for iterating through ``list_database_roles`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``database_roles`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatabaseRoles`` requests and continue to iterate + through the ``database_roles`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseRolesResponse]], + request: spanner_database_admin.ListDatabaseRolesRequest, + response: spanner_database_admin.ListDatabaseRolesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_database_admin.ListDatabaseRolesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseRolesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_database_admin.DatabaseRole]: + async def async_generator(): + async for page in self.pages: + for response in page.database_roles: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupSchedulesPager: + """A pager for iterating through ``list_backup_schedules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_schedules`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupSchedules`` requests and continue to iterate + through the ``backup_schedules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., backup_schedule.ListBackupSchedulesResponse], + request: backup_schedule.ListBackupSchedulesRequest, + response: backup_schedule.ListBackupSchedulesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup_schedule.ListBackupSchedulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backup_schedule.ListBackupSchedulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[backup_schedule.BackupSchedule]: + for page in self.pages: + yield from page.backup_schedules + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBackupSchedulesAsyncPager: + """A pager for iterating through ``list_backup_schedules`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_schedules`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupSchedules`` requests and continue to iterate + through the ``backup_schedules`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[backup_schedule.ListBackupSchedulesResponse]], + request: backup_schedule.ListBackupSchedulesRequest, + response: backup_schedule.ListBackupSchedulesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): + The initial request object. + response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backup_schedule.ListBackupSchedulesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backup_schedule.ListBackupSchedulesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[backup_schedule.BackupSchedule]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_schedules: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst new file mode 100644 index 0000000000..f70c023a98 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DatabaseAdminTransport` is the ABC for all transports. +- public child `DatabaseAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DatabaseAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDatabaseAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DatabaseAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py new file mode 100644 index 0000000000..975834b54d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DatabaseAdminTransport +from .grpc import DatabaseAdminGrpcTransport +from .grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport +from .rest import DatabaseAdminRestTransport +from .rest import DatabaseAdminRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] +_transport_registry['grpc'] = DatabaseAdminGrpcTransport +_transport_registry['grpc_asyncio'] = DatabaseAdminGrpcAsyncIOTransport +_transport_registry['rest'] = DatabaseAdminRestTransport + +__all__ = ( + 'DatabaseAdminTransport', + 'DatabaseAdminGrpcTransport', + 'DatabaseAdminGrpcAsyncIOTransport', + 'DatabaseAdminRestTransport', + 'DatabaseAdminRestInterceptor', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py new file mode 100644 index 0000000000..7d801501d3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py @@ -0,0 +1,795 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
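+#
+# Editor's note (illustrative, not generated output): the ``_transport_registry``
+# in ``transports/__init__.py`` above maps the strings accepted as the client's
+# ``transport=...`` argument to transport classes; a sketch of the lookup::
+#
+#     transport_cls = _transport_registry["grpc_asyncio"]  # DatabaseAdminGrpcAsyncIOTransport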
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.spanner_admin_database_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DatabaseAdminTransport(abc.ABC): + """Abstract transport class for DatabaseAdmin.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. 
+ self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_databases: gapic_v1.method.wrap_method( + self.list_databases, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_database: gapic_v1.method.wrap_method( + self.create_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database: gapic_v1.method.wrap_method( + self.get_database, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database: gapic_v1.method.wrap_method( + self.update_database, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database_ddl: gapic_v1.method.wrap_method( + self.update_database_ddl, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.drop_database: gapic_v1.method.wrap_method( + self.drop_database, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database_ddl: gapic_v1.method.wrap_method( + self.get_database_ddl, + 
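+                # Editor's note (illustrative): with initial=1.0, multiplier=1.3
+                # and maximum=32.0, the retry delays below grow roughly as
+                # 1.0s, 1.3s, 1.69s, 2.2s, ... capped at 32.0s; only
+                # DeadlineExceeded / ServiceUnavailable are retried, within the
+                # 3600.0s overall deadline.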
default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.create_backup: gapic_v1.method.wrap_method( + self.create_backup, + default_timeout=3600.0, + client_info=client_info, + ), + self.copy_backup: gapic_v1.method.wrap_method( + self.copy_backup, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup: gapic_v1.method.wrap_method( + self.get_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup: gapic_v1.method.wrap_method( + self.update_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup: gapic_v1.method.wrap_method( + self.delete_backup, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backups: gapic_v1.method.wrap_method( + self.list_backups, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.restore_database: gapic_v1.method.wrap_method( + self.restore_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_operations: gapic_v1.method.wrap_method( + self.list_database_operations, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_operations: gapic_v1.method.wrap_method( + self.list_backup_operations, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_roles: gapic_v1.method.wrap_method( + self.list_database_roles, + 
default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.add_split_points: gapic_v1.method.wrap_method( + self.add_split_points, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_backup_schedule: gapic_v1.method.wrap_method( + self.create_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup_schedule: gapic_v1.method.wrap_method( + self.get_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup_schedule: gapic_v1.method.wrap_method( + self.update_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup_schedule: gapic_v1.method.wrap_method( + self.delete_backup_schedule, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_schedules: gapic_v1.method.wrap_method( + self.list_backup_schedules, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.internal_update_graph_operation: gapic_v1.method.wrap_method( + self.internal_update_graph_operation, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
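+
+        Editor's note (illustrative): a client that owns its transport closes
+        it on exit via the context-manager protocol, or explicitly with
+        ``client.transport.close()``.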
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + Union[ + spanner_database_admin.ListDatabasesResponse, + Awaitable[spanner_database_admin.ListDatabasesResponse] + ]]: + raise NotImplementedError() + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + Union[ + spanner_database_admin.Database, + Awaitable[spanner_database_admin.Database] + ]]: + raise NotImplementedError() + + @property + def update_database(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + Union[ + spanner_database_admin.GetDatabaseDdlResponse, + Awaitable[spanner_database_admin.GetDatabaseDdlResponse] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def copy_backup(self) -> Callable[ + [backup.CopyBackupRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + Union[ + backup.Backup, + Awaitable[backup.Backup] + ]]: + raise NotImplementedError() + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + Union[ + gsad_backup.Backup, + Awaitable[gsad_backup.Backup] + ]]: + raise NotImplementedError() + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + Union[ + backup.ListBackupsResponse, + Awaitable[backup.ListBackupsResponse] + ]]: + raise NotImplementedError() + + @property + def 
restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + Union[ + spanner_database_admin.ListDatabaseOperationsResponse, + Awaitable[spanner_database_admin.ListDatabaseOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + Union[ + backup.ListBackupOperationsResponse, + Awaitable[backup.ListBackupOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + Union[ + spanner_database_admin.ListDatabaseRolesResponse, + Awaitable[spanner_database_admin.ListDatabaseRolesResponse] + ]]: + raise NotImplementedError() + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + Union[ + spanner_database_admin.AddSplitPointsResponse, + Awaitable[spanner_database_admin.AddSplitPointsResponse] + ]]: + raise NotImplementedError() + + @property + def create_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + Union[ + gsad_backup_schedule.BackupSchedule, + Awaitable[gsad_backup_schedule.BackupSchedule] + ]]: + raise NotImplementedError() + + @property + def get_backup_schedule(self) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + Union[ + backup_schedule.BackupSchedule, + Awaitable[backup_schedule.BackupSchedule] + ]]: + raise NotImplementedError() + + @property + def update_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + Union[ + gsad_backup_schedule.BackupSchedule, + Awaitable[gsad_backup_schedule.BackupSchedule] + ]]: + raise NotImplementedError() + + @property + def delete_backup_schedule(self) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_backup_schedules(self) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + Union[ + backup_schedule.ListBackupSchedulesResponse, + Awaitable[backup_schedule.ListBackupSchedulesResponse] + ]]: + raise NotImplementedError() + + @property + def internal_update_graph_operation(self) -> Callable[ + [spanner_database_admin.InternalUpdateGraphOperationRequest], + Union[ + spanner_database_admin.InternalUpdateGraphOperationResponse, + Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + +__all__ = ( + 'DatabaseAdminTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py new file mode 100644 index 0000000000..8f548acc7c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py @@ -0,0 +1,1285 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + 
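+                # Editor's note (illustrative): this payload dict is assembled
+                # only when DEBUG logging is enabled, keeping request
+                # serialization off the hot path in normal operation.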
}
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class DatabaseAdminGrpcTransport(DatabaseAdminTransport):
+    """gRPC backend transport for DatabaseAdmin.
+
+    Cloud Spanner Database Admin API
+
+    The Cloud Spanner Database Admin API can be used to:
+
+    - create, drop, and list databases
+    - update the schema of pre-existing databases
+    - create, delete, copy and list backups for a database
+    - restore a database from an existing backup
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
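+                # Editor's note (illustrative): the effective precedence here
+                # is roughly: explicit ``channel`` > ``api_mtls_endpoint`` (with
+                # ``client_cert_source`` or ADC SSL) >
+                # ``client_cert_source_for_mtls`` > ``ssl_channel_credentials``.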
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + spanner_database_admin.ListDatabasesResponse]: + r"""Return a callable for the list databases method over gRPC. + + Lists Cloud Spanner databases. + + Returns: + Callable[[~.ListDatabasesRequest], + ~.ListDatabasesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_databases' not in self._stubs: + self._stubs['list_databases'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', + request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, + ) + return self._stubs['list_databases'] + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + operations_pb2.Operation]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Cloud Spanner database and starts to prepare it + for serving. The returned [long-running + operation][google.longrunning.Operation] will have a name of the + format ``/operations/`` and can be + used to track preparation of the database. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Database][google.spanner.admin.database.v1.Database], if + successful. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_database' not in self._stubs: + self._stubs['create_database'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', + request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_database'] + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + spanner_database_admin.Database]: + r"""Return a callable for the get database method over gRPC. + + Gets the state of a Cloud Spanner database. + + Returns: + Callable[[~.GetDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'get_database' not in self._stubs:
+ self._stubs['get_database'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase',
+ request_serializer=spanner_database_admin.GetDatabaseRequest.serialize,
+ response_deserializer=spanner_database_admin.Database.deserialize,
+ )
+ return self._stubs['get_database']
+
+ @property
+ def update_database(self) -> Callable[
+ [spanner_database_admin.UpdateDatabaseRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the update database method over gRPC.
+
+ Updates a Cloud Spanner database. The returned [long-running
+ operation][google.longrunning.Operation] can be used to track
+ the progress of updating the database. If the named database
+ does not exist, returns ``NOT_FOUND``.
+
+ While the operation is pending:
+
+ - The database's
+ [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+ field is set to true.
+ - Cancelling the operation is best-effort. If the cancellation
+ succeeds, the operation metadata's
+ [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time]
+ is set, the updates are reverted, and the operation terminates
+ with a ``CANCELLED`` status.
+ - New UpdateDatabase requests will return a
+ ``FAILED_PRECONDITION`` error until the pending operation is
+ done (returns successfully or with error).
+ - Reading the database via the API continues to give the
+ pre-request values.
+
+ Upon completion of the returned operation:
+
+ - The new values are in effect and readable via the API.
+ - The database's
+ [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+ field becomes false.
+
+ The returned [long-running
+ operation][google.longrunning.Operation] will have a name of the
+ format
+ ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``
+ and can be used to track the database modification. The
+ [metadata][google.longrunning.Operation.metadata] field type is
+ [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata].
+ The [response][google.longrunning.Operation.response] field type
+ is [Database][google.spanner.admin.database.v1.Database], if
+ successful.
+
+ Returns:
+ Callable[[~.UpdateDatabaseRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_database' not in self._stubs:
+ self._stubs['update_database'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase',
+ request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['update_database']
+
+ @property
+ def update_database_ddl(self) -> Callable[
+ [spanner_database_admin.UpdateDatabaseDdlRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the update database ddl method over gRPC.
+
+ Updates the schema of a Cloud Spanner database by
+ creating/altering/dropping tables, columns, indexes, etc. The
+ returned [long-running operation][google.longrunning.Operation]
+ will have a name of the format
+ ``<database_name>/operations/<operation_id>`` and can be used to
+ track execution of the schema change(s).
The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + + Returns: + Callable[[~.UpdateDatabaseDdlRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_database_ddl' not in self._stubs: + self._stubs['update_database_ddl'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', + request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_database_ddl'] + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + empty_pb2.Empty]: + r"""Return a callable for the drop database method over gRPC. + + Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. Note: Cloud Spanner might continue to accept + requests for a few seconds after the database has been deleted. + + Returns: + Callable[[~.DropDatabaseRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'drop_database' not in self._stubs: + self._stubs['drop_database'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', + request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['drop_database'] + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + spanner_database_admin.GetDatabaseDdlResponse]: + r"""Return a callable for the get database ddl method over gRPC. + + Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. + + Returns: + Callable[[~.GetDatabaseDdlRequest], + ~.GetDatabaseDdlResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database_ddl' not in self._stubs: + self._stubs['get_database_ddl'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', + request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, + response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, + ) + return self._stubs['get_database_ddl'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on a database or backup resource. + Replaces any existing policy. 
+ + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + database or backup resource. + + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. + Calling this method on a backup that does not exist will result + in a NOT_FOUND error if the user has ``spanner.backups.list`` + permission on the containing instance. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
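+ # A minimal sketch (the resource name and permission are hypothetical):
+ # the callable takes the raw IAM request and returns the subset of
+ # permissions the caller actually holds, e.g.
+ #
+ #   request = iam_policy_pb2.TestIamPermissionsRequest(
+ #       resource="projects/my-project/instances/my-instance/databases/my-db",
+ #       permissions=["spanner.databases.get"],
+ #   )
+ #   response = transport.test_iam_permissions(request)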
+ if 'test_iam_permissions' not in self._stubs:
+ self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions',
+ request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+ response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+ )
+ return self._stubs['test_iam_permissions']
+
+ @property
+ def create_backup(self) -> Callable[
+ [gsad_backup.CreateBackupRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the create backup method over gRPC.
+
+ Starts creating a new Cloud Spanner Backup. The returned backup
+ [long-running operation][google.longrunning.Operation] will have
+ a name of the format
+ ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+ and can be used to track creation of the backup. The
+ [metadata][google.longrunning.Operation.metadata] field type is
+ [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata].
+ The [response][google.longrunning.Operation.response] field type
+ is [Backup][google.spanner.admin.database.v1.Backup], if
+ successful. Cancelling the returned operation will stop the
+ creation and delete the backup. There can be only one pending
+ backup creation per database. Backup creation of different
+ databases can run concurrently.
+
+ Returns:
+ Callable[[~.CreateBackupRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_backup' not in self._stubs:
+ self._stubs['create_backup'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup',
+ request_serializer=gsad_backup.CreateBackupRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['create_backup']
+
+ @property
+ def copy_backup(self) -> Callable[
+ [backup.CopyBackupRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the copy backup method over gRPC.
+
+ Starts copying a Cloud Spanner Backup. The returned backup
+ [long-running operation][google.longrunning.Operation] will have
+ a name of the format
+ ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>``
+ and can be used to track copying of the backup. The operation is
+ associated with the destination backup. The
+ [metadata][google.longrunning.Operation.metadata] field type is
+ [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata].
+ The [response][google.longrunning.Operation.response] field type
+ is [Backup][google.spanner.admin.database.v1.Backup], if
+ successful. Cancelling the returned operation will stop the
+ copying and delete the destination backup. Concurrent CopyBackup
+ requests can run on the same source backup.
+
+ Returns:
+ Callable[[~.CopyBackupRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
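+ # A hedged long-running-operation sketch (the request object is assumed
+ # to be built elsewhere): callables such as copy_backup return a raw
+ # operations_pb2.Operation, which can be polled via operations_client:
+ #
+ #   operation = transport.copy_backup(request)
+ #   latest = transport.operations_client.get_operation(operation.name)
+ #   if latest.done:
+ #       ...  # unpack latest.response or latest.error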
+ if 'copy_backup' not in self._stubs: + self._stubs['copy_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', + request_serializer=backup.CopyBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['copy_backup'] + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + backup.Backup]: + r"""Return a callable for the get backup method over gRPC. + + Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.GetBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup' not in self._stubs: + self._stubs['get_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', + request_serializer=backup.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs['get_backup'] + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + gsad_backup.Backup]: + r"""Return a callable for the update backup method over gRPC. + + Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.UpdateBackupRequest], + ~.Backup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_backup' not in self._stubs: + self._stubs['update_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', + request_serializer=gsad_backup.UpdateBackupRequest.serialize, + response_deserializer=gsad_backup.Backup.deserialize, + ) + return self._stubs['update_backup'] + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.DeleteBackupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup' not in self._stubs: + self._stubs['delete_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', + request_serializer=backup.DeleteBackupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup'] + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + backup.ListBackupsResponse]: + r"""Return a callable for the list backups method over gRPC. + + Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. 
+
+ Returns:
+ Callable[[~.ListBackupsRequest],
+ ~.ListBackupsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_backups' not in self._stubs:
+ self._stubs['list_backups'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups',
+ request_serializer=backup.ListBackupsRequest.serialize,
+ response_deserializer=backup.ListBackupsResponse.deserialize,
+ )
+ return self._stubs['list_backups']
+
+ @property
+ def restore_database(self) -> Callable[
+ [spanner_database_admin.RestoreDatabaseRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the restore database method over gRPC.
+
+ Create a new database by restoring from a completed backup. The
+ new database must be in the same project and in an instance with
+ the same instance configuration as the instance containing the
+ backup. The returned database [long-running
+ operation][google.longrunning.Operation] has a name of the
+ format
+ ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``,
+ and can be used to track the progress of the operation, and to
+ cancel it. The [metadata][google.longrunning.Operation.metadata]
+ field type is
+ [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata].
+ The [response][google.longrunning.Operation.response] type is
+ [Database][google.spanner.admin.database.v1.Database], if
+ successful. Cancelling the returned operation will stop the
+ restore and delete the database. There can be only one database
+ being restored into an instance at a time. Once the restore
+ operation completes, a new restore operation can be initiated,
+ without waiting for the optimize operation associated with the
+ first restore to complete.
+
+ Returns:
+ Callable[[~.RestoreDatabaseRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'restore_database' not in self._stubs:
+ self._stubs['restore_database'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase',
+ request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['restore_database']
+
+ @property
+ def list_database_operations(self) -> Callable[
+ [spanner_database_admin.ListDatabaseOperationsRequest],
+ spanner_database_admin.ListDatabaseOperationsResponse]:
+ r"""Return a callable for the list database operations method over gRPC.
+
+ Lists database
+ [longrunning-operations][google.longrunning.Operation]. A
+ database operation has a name of the form
+ ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``.
+ The long-running operation
+ [metadata][google.longrunning.Operation.metadata] field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations.
+
+ Returns:
+ Callable[[~.ListDatabaseOperationsRequest],
+ ~.ListDatabaseOperationsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_database_operations' not in self._stubs: + self._stubs['list_database_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', + request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, + ) + return self._stubs['list_database_operations'] + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + backup.ListBackupOperationsResponse]: + r"""Return a callable for the list backup operations method over gRPC. + + Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + Returns: + Callable[[~.ListBackupOperationsRequest], + ~.ListBackupOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_operations' not in self._stubs: + self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + request_serializer=backup.ListBackupOperationsRequest.serialize, + response_deserializer=backup.ListBackupOperationsResponse.deserialize, + ) + return self._stubs['list_backup_operations'] + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + spanner_database_admin.ListDatabaseRolesResponse]: + r"""Return a callable for the list database roles method over gRPC. + + Lists Cloud Spanner database roles. + + Returns: + Callable[[~.ListDatabaseRolesRequest], + ~.ListDatabaseRolesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_database_roles' not in self._stubs: + self._stubs['list_database_roles'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', + request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, + ) + return self._stubs['list_database_roles'] + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + spanner_database_admin.AddSplitPointsResponse]: + r"""Return a callable for the add split points method over gRPC. 
+ + Adds split points to specified tables, indexes of a + database. + + Returns: + Callable[[~.AddSplitPointsRequest], + ~.AddSplitPointsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'add_split_points' not in self._stubs: + self._stubs['add_split_points'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints', + request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize, + response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize, + ) + return self._stubs['add_split_points'] + + @property + def create_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule]: + r"""Return a callable for the create backup schedule method over gRPC. + + Creates a new backup schedule. + + Returns: + Callable[[~.CreateBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_backup_schedule' not in self._stubs: + self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', + request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['create_backup_schedule'] + + @property + def get_backup_schedule(self) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + backup_schedule.BackupSchedule]: + r"""Return a callable for the get backup schedule method over gRPC. + + Gets backup schedule for the input schedule name. + + Returns: + Callable[[~.GetBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup_schedule' not in self._stubs: + self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', + request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, + response_deserializer=backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['get_backup_schedule'] + + @property + def update_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule]: + r"""Return a callable for the update backup schedule method over gRPC. + + Updates a backup schedule. + + Returns: + Callable[[~.UpdateBackupScheduleRequest], + ~.BackupSchedule]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
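+ # A minimal field-mask sketch (the schedule message and field path are
+ # assumptions): only fields named in update_mask are modified, e.g.
+ #
+ #   from google.protobuf import field_mask_pb2
+ #   request = gsad_backup_schedule.UpdateBackupScheduleRequest(
+ #       backup_schedule=schedule,
+ #       update_mask=field_mask_pb2.FieldMask(paths=["spec"]),
+ #   )
+ #   updated = transport.update_backup_schedule(request)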
+ if 'update_backup_schedule' not in self._stubs: + self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', + request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['update_backup_schedule'] + + @property + def delete_backup_schedule(self) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete backup schedule method over gRPC. + + Deletes a backup schedule. + + Returns: + Callable[[~.DeleteBackupScheduleRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup_schedule' not in self._stubs: + self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', + request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup_schedule'] + + @property + def list_backup_schedules(self) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + backup_schedule.ListBackupSchedulesResponse]: + r"""Return a callable for the list backup schedules method over gRPC. + + Lists all the backup schedules for the database. + + Returns: + Callable[[~.ListBackupSchedulesRequest], + ~.ListBackupSchedulesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_schedules' not in self._stubs: + self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', + request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, + response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, + ) + return self._stubs['list_backup_schedules'] + + @property + def internal_update_graph_operation(self) -> Callable[ + [spanner_database_admin.InternalUpdateGraphOperationRequest], + spanner_database_admin.InternalUpdateGraphOperationResponse]: + r"""Return a callable for the internal update graph + operation method over gRPC. + + This is an internal API called by Spanner Graph jobs. + You should never need to call this API directly. + + Returns: + Callable[[~.InternalUpdateGraphOperationRequest], + ~.InternalUpdateGraphOperationResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'internal_update_graph_operation' not in self._stubs: + self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation', + request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize, + response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize, + ) + return self._stubs['internal_update_graph_operation'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
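+ # A minimal sketch (the operation name is illustrative): the Operations
+ # mixin callables take the raw google.longrunning request types, e.g.
+ #
+ #   request = operations_pb2.ListOperationsRequest(
+ #       name="projects/my-project/instances/my-instance/databases/my-db",
+ #   )
+ #   response = transport.list_operations(request)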
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DatabaseAdminGrpcTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py new file mode 100644 index 0000000000..06cfaa6349 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py @@ -0,0 +1,1656 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO +from .grpc import DatabaseAdminGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(std_logging.DEBUG)
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra = {
+ "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+ # Convert the gRPC trailing metadata into a plain dict for logging.
+ metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra = {
+ "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
+
+class DatabaseAdminGrpcAsyncIOTransport(DatabaseAdminTransport):
+ """gRPC AsyncIO backend transport for DatabaseAdmin.
+
+ Cloud Spanner Database Admin API
+
+ The Cloud Spanner Database Admin API can be used to:
+
+ - create, drop, and list databases
+ - update the schema of pre-existing databases
+ - create, delete, copy and list backups for a database
+ - restore a database from an existing backup
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(cls,
+ host: str = 'spanner.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
This argument will be
+ removed in the next major version of this library.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs
+ )
+
+ def __init__(self, *,
+ host: str = 'spanner.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to (default: 'spanner.googleapis.com').
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if a ``channel`` instance is provided.
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if a ``channel`` instance is provided.
+ This argument will be removed in the next major version of this library.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+ A ``Channel`` instance through which to make calls, or a Callable
+ that constructs and returns one. If set to None, ``self.create_channel``
+ is used to create the channel. If a Callable is given, it will be called
+ with the same arguments as used in ``self.create_channel``.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if a ``channel`` instance is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if isinstance(channel, aio.Channel):
+ # Ignore credentials if a channel was passed.
+ credentials = None
+ self._ignore_credentials = True
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ if not self._grpc_channel:
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
+ self._host,
+ # use the credentials which are saved
+ credentials=self._credentials,
+ # Set ``credentials_file`` to ``None`` here as
+ # the credentials that we saved earlier should be used.
+ credentials_file=None,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
+ self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ # Wrap messages.
This must be done after self._logged_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsAsyncClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Quick check: Only create a new client if we do not already have one.
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsAsyncClient(
+ self._logged_channel
+ )
+
+ # Return the client from cache.
+ return self._operations_client
+
+ @property
+ def list_databases(self) -> Callable[
+ [spanner_database_admin.ListDatabasesRequest],
+ Awaitable[spanner_database_admin.ListDatabasesResponse]]:
+ r"""Return a callable for the list databases method over gRPC.
+
+ Lists Cloud Spanner databases.
+
+ Returns:
+ Callable[[~.ListDatabasesRequest],
+ Awaitable[~.ListDatabasesResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_databases' not in self._stubs:
+ self._stubs['list_databases'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases',
+ request_serializer=spanner_database_admin.ListDatabasesRequest.serialize,
+ response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize,
+ )
+ return self._stubs['list_databases']
+
+ @property
+ def create_database(self) -> Callable[
+ [spanner_database_admin.CreateDatabaseRequest],
+ Awaitable[operations_pb2.Operation]]:
+ r"""Return a callable for the create database method over gRPC.
+
+ Creates a new Cloud Spanner database and starts to prepare it
+ for serving. The returned [long-running
+ operation][google.longrunning.Operation] will have a name of the
+ format ``<database_name>/operations/<operation_id>`` and can be
+ used to track preparation of the database. The
+ [metadata][google.longrunning.Operation.metadata] field type is
+ [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata].
+ The [response][google.longrunning.Operation.response] field type
+ is [Database][google.spanner.admin.database.v1.Database], if
+ successful.
+
+ Returns:
+ Callable[[~.CreateDatabaseRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
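+ # A hedged async sketch (resource names and DDL are illustrative): each
+ # callable on this transport returns an awaitable, e.g.
+ #
+ #   request = spanner_database_admin.CreateDatabaseRequest(
+ #       parent="projects/my-project/instances/my-instance",
+ #       create_statement="CREATE DATABASE `my-db`",
+ #   )
+ #   operation = await transport.create_database(request)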
+ if 'create_database' not in self._stubs:
+ self._stubs['create_database'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase',
+ request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['create_database']
+
+ @property
+ def get_database(self) -> Callable[
+ [spanner_database_admin.GetDatabaseRequest],
+ Awaitable[spanner_database_admin.Database]]:
+ r"""Return a callable for the get database method over gRPC.
+
+ Gets the state of a Cloud Spanner database.
+
+ Returns:
+ Callable[[~.GetDatabaseRequest],
+ Awaitable[~.Database]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'get_database' not in self._stubs:
+ self._stubs['get_database'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase',
+ request_serializer=spanner_database_admin.GetDatabaseRequest.serialize,
+ response_deserializer=spanner_database_admin.Database.deserialize,
+ )
+ return self._stubs['get_database']
+
+ @property
+ def update_database(self) -> Callable[
+ [spanner_database_admin.UpdateDatabaseRequest],
+ Awaitable[operations_pb2.Operation]]:
+ r"""Return a callable for the update database method over gRPC.
+
+ Updates a Cloud Spanner database. The returned [long-running
+ operation][google.longrunning.Operation] can be used to track
+ the progress of updating the database. If the named database
+ does not exist, returns ``NOT_FOUND``.
+
+ While the operation is pending:
+
+ - The database's
+ [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+ field is set to true.
+ - Cancelling the operation is best-effort. If the cancellation
+ succeeds, the operation metadata's
+ [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time]
+ is set, the updates are reverted, and the operation terminates
+ with a ``CANCELLED`` status.
+ - New UpdateDatabase requests will return a
+ ``FAILED_PRECONDITION`` error until the pending operation is
+ done (returns successfully or with error).
+ - Reading the database via the API continues to give the
+ pre-request values.
+
+ Upon completion of the returned operation:
+
+ - The new values are in effect and readable via the API.
+ - The database's
+ [reconciling][google.spanner.admin.database.v1.Database.reconciling]
+ field becomes false.
+
+ The returned [long-running
+ operation][google.longrunning.Operation] will have a name of the
+ format
+ ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``
+ and can be used to track the database modification. The
+ [metadata][google.longrunning.Operation.metadata] field type is
+ [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata].
+ The [response][google.longrunning.Operation.response] field type
+ is [Database][google.spanner.admin.database.v1.Database], if
+ successful.
+
+ Returns:
+ Callable[[~.UpdateDatabaseRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_database' not in self._stubs: + self._stubs['update_database'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase', + request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_database'] + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update database ddl method over gRPC. + + Updates the schema of a Cloud Spanner database by + creating/altering/dropping tables, columns, indexes, etc. The + returned [long-running operation][google.longrunning.Operation] + will have a name of the format + ``/operations/`` and can be used to + track execution of the schema change(s). The + [metadata][google.longrunning.Operation.metadata] field type is + [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + The operation has no response. + + Returns: + Callable[[~.UpdateDatabaseDdlRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_database_ddl' not in self._stubs: + self._stubs['update_database_ddl'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', + request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_database_ddl'] + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the drop database method over gRPC. + + Drops (aka deletes) a Cloud Spanner database. Completed backups + for the database will be retained according to their + ``expire_time``. Note: Cloud Spanner might continue to accept + requests for a few seconds after the database has been deleted. + + Returns: + Callable[[~.DropDatabaseRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'drop_database' not in self._stubs: + self._stubs['drop_database'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', + request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['drop_database'] + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + Awaitable[spanner_database_admin.GetDatabaseDdlResponse]]: + r"""Return a callable for the get database ddl method over gRPC. + + Returns the schema of a Cloud Spanner database as a list of + formatted DDL statements. This method does not show pending + schema updates, those may be queried using the + [Operations][google.longrunning.Operations] API. 
+ + Returns: + Callable[[~.GetDatabaseDdlRequest], + Awaitable[~.GetDatabaseDdlResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_database_ddl' not in self._stubs: + self._stubs['get_database_ddl'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', + request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, + response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, + ) + return self._stubs['get_database_ddl'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on a database or backup resource. + Replaces any existing policy. + + Authorization requires ``spanner.databases.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.setIamPolicy`` + permission on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a database or backup + resource. Returns an empty policy if a database or backup exists + but does not have a policy set. + + Authorization requires ``spanner.databases.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. For + backups, authorization requires ``spanner.backups.getIamPolicy`` + permission on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
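+        # A sketch of fetching a database's policy through this stub;
+        # the resource name is illustrative:
+        #
+        #     request = iam_policy_pb2.GetIamPolicyRequest(
+        #         resource="projects/my-project/instances/my-instance"
+        #                  "/databases/my-database",
+        #     )
+        #     policy = await transport.get_iam_policy(request)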
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + database or backup resource. + + Attempting this RPC on a non-existent Cloud Spanner database + will result in a NOT_FOUND error if the user has + ``spanner.databases.list`` permission on the containing Cloud + Spanner instance. Otherwise returns an empty set of permissions. + Calling this method on a backup that does not exist will result + in a NOT_FOUND error if the user has ``spanner.backups.list`` + permission on the containing instance. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create backup method over gRPC. + + Starts creating a new Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track creation of the backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + creation and delete the backup. There can be only one pending + backup creation per database. Backup creation of different + databases can run concurrently. + + Returns: + Callable[[~.CreateBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
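+        # A sketch of starting a backup; values are illustrative, and
+        # ``expire_time`` is a required field on the backup message:
+        #
+        #     request = gsad_backup.CreateBackupRequest(
+        #         parent="projects/my-project/instances/my-instance",
+        #         backup_id="my-backup",
+        #         backup=gsad_backup.Backup(
+        #             database="projects/my-project/instances"
+        #                      "/my-instance/databases/my-database",
+        #             expire_time=expire_timestamp,
+        #         ),
+        #     )
+        #     operation = await transport.create_backup(request)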
+ if 'create_backup' not in self._stubs: + self._stubs['create_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', + request_serializer=gsad_backup.CreateBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_backup'] + + @property + def copy_backup(self) -> Callable[ + [backup.CopyBackupRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the copy backup method over gRPC. + + Starts copying a Cloud Spanner Backup. The returned backup + [long-running operation][google.longrunning.Operation] will have + a name of the format + ``projects//instances//backups//operations/`` + and can be used to track copying of the backup. The operation is + associated with the destination backup. The + [metadata][google.longrunning.Operation.metadata] field type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. + The [response][google.longrunning.Operation.response] field type + is [Backup][google.spanner.admin.database.v1.Backup], if + successful. Cancelling the returned operation will stop the + copying and delete the destination backup. Concurrent CopyBackup + requests can run on the same source backup. + + Returns: + Callable[[~.CopyBackupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'copy_backup' not in self._stubs: + self._stubs['copy_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', + request_serializer=backup.CopyBackupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['copy_backup'] + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + Awaitable[backup.Backup]]: + r"""Return a callable for the get backup method over gRPC. + + Gets metadata on a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.GetBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup' not in self._stubs: + self._stubs['get_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', + request_serializer=backup.GetBackupRequest.serialize, + response_deserializer=backup.Backup.deserialize, + ) + return self._stubs['get_backup'] + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + Awaitable[gsad_backup.Backup]]: + r"""Return a callable for the update backup method over gRPC. + + Updates a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.UpdateBackupRequest], + Awaitable[~.Backup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
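+        # Only fields named in ``update_mask`` are changed. A sketch of
+        # extending a backup's retention; names are illustrative and
+        # ``field_mask_pb2`` is ``google.protobuf.field_mask_pb2``:
+        #
+        #     request = gsad_backup.UpdateBackupRequest(
+        #         backup=gsad_backup.Backup(
+        #             name="projects/my-project/instances"
+        #                  "/my-instance/backups/my-backup",
+        #             expire_time=new_expire_timestamp,
+        #         ),
+        #         update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
+        #     )
+        #     updated = await transport.update_backup(request)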
+ if 'update_backup' not in self._stubs: + self._stubs['update_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', + request_serializer=gsad_backup.UpdateBackupRequest.serialize, + response_deserializer=gsad_backup.Backup.deserialize, + ) + return self._stubs['update_backup'] + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete backup method over gRPC. + + Deletes a pending or completed + [Backup][google.spanner.admin.database.v1.Backup]. + + Returns: + Callable[[~.DeleteBackupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_backup' not in self._stubs: + self._stubs['delete_backup'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', + request_serializer=backup.DeleteBackupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup'] + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + Awaitable[backup.ListBackupsResponse]]: + r"""Return a callable for the list backups method over gRPC. + + Lists completed and pending backups. Backups returned are + ordered by ``create_time`` in descending order, starting from + the most recent ``create_time``. + + Returns: + Callable[[~.ListBackupsRequest], + Awaitable[~.ListBackupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backups' not in self._stubs: + self._stubs['list_backups'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', + request_serializer=backup.ListBackupsRequest.serialize, + response_deserializer=backup.ListBackupsResponse.deserialize, + ) + return self._stubs['list_backups'] + + @property + def restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the restore database method over gRPC. + + Create a new database by restoring from a completed backup. The + new database must be in the same project and in an instance with + the same instance configuration as the instance containing the + backup. The returned database [long-running + operation][google.longrunning.Operation] has a name of the + format + ``projects//instances//databases//operations/``, + and can be used to track the progress of the operation, and to + cancel it. The [metadata][google.longrunning.Operation.metadata] + field type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + The [response][google.longrunning.Operation.response] type is + [Database][google.spanner.admin.database.v1.Database], if + successful. Cancelling the returned operation will stop the + restore and delete the database. There can be only one database + being restored into an instance at a time. 
Once the restore + operation completes, a new restore operation can be initiated, + without waiting for the optimize operation associated with the + first restore to complete. + + Returns: + Callable[[~.RestoreDatabaseRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'restore_database' not in self._stubs: + self._stubs['restore_database'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', + request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['restore_database'] + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]]: + r"""Return a callable for the list database operations method over gRPC. + + Lists database + [longrunning-operations][google.longrunning.Operation]. A + database operation has a name of the form + ``projects//instances//databases//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. + + Returns: + Callable[[~.ListDatabaseOperationsRequest], + Awaitable[~.ListDatabaseOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_database_operations' not in self._stubs: + self._stubs['list_database_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', + request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, + ) + return self._stubs['list_database_operations'] + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + Awaitable[backup.ListBackupOperationsResponse]]: + r"""Return a callable for the list backup operations method over gRPC. + + Lists the backup [long-running + operations][google.longrunning.Operation] in the given instance. + A backup operation has a name of the form + ``projects//instances//backups//operations/``. + The long-running operation + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.progress.start_time`` in descending + order starting from the most recently started operation. + + Returns: + Callable[[~.ListBackupOperationsRequest], + Awaitable[~.ListBackupOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_operations' not in self._stubs: + self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', + request_serializer=backup.ListBackupOperationsRequest.serialize, + response_deserializer=backup.ListBackupOperationsResponse.deserialize, + ) + return self._stubs['list_backup_operations'] + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + Awaitable[spanner_database_admin.ListDatabaseRolesResponse]]: + r"""Return a callable for the list database roles method over gRPC. + + Lists Cloud Spanner database roles. + + Returns: + Callable[[~.ListDatabaseRolesRequest], + Awaitable[~.ListDatabaseRolesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_database_roles' not in self._stubs: + self._stubs['list_database_roles'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', + request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, + response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, + ) + return self._stubs['list_database_roles'] + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + Awaitable[spanner_database_admin.AddSplitPointsResponse]]: + r"""Return a callable for the add split points method over gRPC. + + Adds split points to specified tables, indexes of a + database. + + Returns: + Callable[[~.AddSplitPointsRequest], + Awaitable[~.AddSplitPointsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'add_split_points' not in self._stubs: + self._stubs['add_split_points'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints', + request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize, + response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize, + ) + return self._stubs['add_split_points'] + + @property + def create_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + Awaitable[gsad_backup_schedule.BackupSchedule]]: + r"""Return a callable for the create backup schedule method over gRPC. + + Creates a new backup schedule. + + Returns: + Callable[[~.CreateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
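+        # A sketch of creating a schedule; values are illustrative, and
+        # the schedule is a child resource of the database:
+        #
+        #     request = gsad_backup_schedule.CreateBackupScheduleRequest(
+        #         parent="projects/my-project/instances"
+        #                "/my-instance/databases/my-database",
+        #         backup_schedule_id="daily",
+        #         backup_schedule=schedule,  # a BackupSchedule message
+        #     )
+        #     created = await transport.create_backup_schedule(request)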
+ if 'create_backup_schedule' not in self._stubs: + self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', + request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['create_backup_schedule'] + + @property + def get_backup_schedule(self) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + Awaitable[backup_schedule.BackupSchedule]]: + r"""Return a callable for the get backup schedule method over gRPC. + + Gets backup schedule for the input schedule name. + + Returns: + Callable[[~.GetBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_backup_schedule' not in self._stubs: + self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', + request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, + response_deserializer=backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['get_backup_schedule'] + + @property + def update_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + Awaitable[gsad_backup_schedule.BackupSchedule]]: + r"""Return a callable for the update backup schedule method over gRPC. + + Updates a backup schedule. + + Returns: + Callable[[~.UpdateBackupScheduleRequest], + Awaitable[~.BackupSchedule]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_backup_schedule' not in self._stubs: + self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', + request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, + response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, + ) + return self._stubs['update_backup_schedule'] + + @property + def delete_backup_schedule(self) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete backup schedule method over gRPC. + + Deletes a backup schedule. + + Returns: + Callable[[~.DeleteBackupScheduleRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
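+        # This RPC resolves to ``google.protobuf.Empty``. A sketch with an
+        # illustrative schedule name:
+        #
+        #     await transport.delete_backup_schedule(
+        #         backup_schedule.DeleteBackupScheduleRequest(
+        #             name="projects/my-project/instances/my-instance"
+        #                  "/databases/my-database/backupSchedules/daily"))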
+ if 'delete_backup_schedule' not in self._stubs: + self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', + request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_backup_schedule'] + + @property + def list_backup_schedules(self) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + Awaitable[backup_schedule.ListBackupSchedulesResponse]]: + r"""Return a callable for the list backup schedules method over gRPC. + + Lists all the backup schedules for the database. + + Returns: + Callable[[~.ListBackupSchedulesRequest], + Awaitable[~.ListBackupSchedulesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_backup_schedules' not in self._stubs: + self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', + request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, + response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, + ) + return self._stubs['list_backup_schedules'] + + @property + def internal_update_graph_operation(self) -> Callable[ + [spanner_database_admin.InternalUpdateGraphOperationRequest], + Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse]]: + r"""Return a callable for the internal update graph + operation method over gRPC. + + This is an internal API called by Spanner Graph jobs. + You should never need to call this API directly. + + Returns: + Callable[[~.InternalUpdateGraphOperationRequest], + Awaitable[~.InternalUpdateGraphOperationResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'internal_update_graph_operation' not in self._stubs: + self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary( + '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation', + request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize, + response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize, + ) + return self._stubs['internal_update_graph_operation'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_databases: self._wrap_method( + self.list_databases, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_database: self._wrap_method( + self.create_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database: self._wrap_method( + self.get_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database: self._wrap_method( + self.update_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_database_ddl: self._wrap_method( + self.update_database_ddl, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.drop_database: self._wrap_method( + self.drop_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_database_ddl: self._wrap_method( + self.get_database_ddl, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.create_backup: self._wrap_method( + self.create_backup, + 
default_timeout=3600.0, + client_info=client_info, + ), + self.copy_backup: self._wrap_method( + self.copy_backup, + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup: self._wrap_method( + self.get_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup: self._wrap_method( + self.update_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup: self._wrap_method( + self.delete_backup, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backups: self._wrap_method( + self.list_backups, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.restore_database: self._wrap_method( + self.restore_database, + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_operations: self._wrap_method( + self.list_database_operations, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_operations: self._wrap_method( + self.list_backup_operations, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_database_roles: self._wrap_method( + self.list_database_roles, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.add_split_points: self._wrap_method( + self.add_split_points, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_backup_schedule: self._wrap_method( + self.create_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_backup_schedule: self._wrap_method( + 
self.get_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.update_backup_schedule: self._wrap_method( + self.update_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_backup_schedule: self._wrap_method( + self.delete_backup_schedule, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_backup_schedules: self._wrap_method( + self.list_backup_schedules, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.internal_update_graph_operation: self._wrap_method( + self.internal_update_graph_operation, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
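+        # Unlike the service stubs above, the Operations mixin stubs use
+        # raw protobuf serializers (``SerializeToString``/``FromString``)
+        # instead of proto-plus ``serialize``/``deserialize``. A cancel
+        # sketch, with ``op_name`` illustrative:
+        #
+        #     await transport.cancel_operation(
+        #         operations_pb2.CancelOperationRequest(name=op_name))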
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'DatabaseAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py new file mode 100644 index 0000000000..012c64468c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py @@ -0,0 +1,5336 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.spanner_admin_database_v1.types import backup +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import backup_schedule +from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule +from google.cloud.spanner_admin_database_v1.types import spanner_database_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseDatabaseAdminRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DatabaseAdminRestInterceptor: + """Interceptor for DatabaseAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DatabaseAdminRestTransport. + + .. 
code-block:: python + class MyCustomDatabaseAdminInterceptor(DatabaseAdminRestInterceptor): + def pre_add_split_points(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_add_split_points(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_copy_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_copy_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_drop_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_database_ddl(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_database_ddl(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_internal_update_graph_operation(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_internal_update_graph_operation(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backup_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_backups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backups(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_list_backup_schedules(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_schedules(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_database_operations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_database_operations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_database_roles(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_database_roles(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_databases(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_databases(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_restore_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_restore_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_schedule(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_schedule(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_database(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_database_ddl(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_database_ddl(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DatabaseAdminRestTransport(interceptor=MyCustomDatabaseAdminInterceptor()) + client = DatabaseAdminClient(transport=transport) + + + """ + def pre_add_split_points(self, request: spanner_database_admin.AddSplitPointsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for add_split_points + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_add_split_points(self, response: spanner_database_admin.AddSplitPointsResponse) -> spanner_database_admin.AddSplitPointsResponse: + """Post-rpc interceptor for add_split_points + + DEPRECATED. 
Please use the `post_add_split_points_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_add_split_points` interceptor runs + before the `post_add_split_points_with_metadata` interceptor. + """ + return response + + def post_add_split_points_with_metadata(self, response: spanner_database_admin.AddSplitPointsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_split_points + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_add_split_points_with_metadata` + interceptor in new development instead of the `post_add_split_points` interceptor. + When both interceptors are used, this `post_add_split_points_with_metadata` interceptor runs after the + `post_add_split_points` interceptor. The (possibly modified) response returned by + `post_add_split_points` will be passed to + `post_add_split_points_with_metadata`. + """ + return response, metadata + + def pre_copy_backup(self, request: backup.CopyBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.CopyBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for copy_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_copy_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for copy_backup + + DEPRECATED. Please use the `post_copy_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_copy_backup` interceptor runs + before the `post_copy_backup_with_metadata` interceptor. + """ + return response + + def post_copy_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for copy_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_copy_backup_with_metadata` + interceptor in new development instead of the `post_copy_backup` interceptor. + When both interceptors are used, this `post_copy_backup_with_metadata` interceptor runs after the + `post_copy_backup` interceptor. The (possibly modified) response returned by + `post_copy_backup` will be passed to + `post_copy_backup_with_metadata`. + """ + return response, metadata + + def pre_create_backup(self, request: gsad_backup.CreateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.CreateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. 
+ """ + return request, metadata + + def post_create_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_backup + + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. + """ + return response + + def post_create_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + + def pre_create_backup_schedule(self, request: gsad_backup_schedule.CreateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.CreateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_create_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: + """Post-rpc interceptor for create_backup_schedule + + DEPRECATED. Please use the `post_create_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_create_backup_schedule` interceptor runs + before the `post_create_backup_schedule_with_metadata` interceptor. + """ + return response + + def post_create_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_create_backup_schedule_with_metadata` + interceptor in new development instead of the `post_create_backup_schedule` interceptor. + When both interceptors are used, this `post_create_backup_schedule_with_metadata` interceptor runs after the + `post_create_backup_schedule` interceptor. The (possibly modified) response returned by + `post_create_backup_schedule` will be passed to + `post_create_backup_schedule_with_metadata`. 
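+
+        For example, a subclass could append an entry to the returned
+        metadata (an illustrative sketch, not generated code):
+
+        .. code-block:: python
+
+            def post_create_backup_schedule_with_metadata(self, response, metadata):
+                return response, list(metadata) + [("x-audit", "1")]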
+ """ + return response, metadata + + def pre_create_database(self, request: spanner_database_admin.CreateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_create_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_database + + DEPRECATED. Please use the `post_create_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_create_database` interceptor runs + before the `post_create_database_with_metadata` interceptor. + """ + return response + + def post_create_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_create_database_with_metadata` + interceptor in new development instead of the `post_create_database` interceptor. + When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the + `post_create_database` interceptor. The (possibly modified) response returned by + `post_create_database` will be passed to + `post_create_database_with_metadata`. + """ + return response, metadata + + def pre_delete_backup(self, request: backup.DeleteBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def pre_delete_backup_schedule(self, request: backup_schedule.DeleteBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.DeleteBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def pre_drop_database(self, request: spanner_database_admin.DropDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.DropDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for drop_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def pre_get_backup(self, request: backup.GetBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. 
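+
+        The ``metadata`` argument is a flat sequence of key/value tuples;
+        values are ``str`` except for keys ending in ``-bin``, which carry
+        ``bytes``. An illustrative sketch of the shape (both entries are
+        hypothetical):
+
+        .. code-block:: python
+
+            metadata = (
+                ("x-example-routing-header", "projects/p/instances/i"),
+                ("x-example-payload-bin", b"example-bytes"),
+            )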
+ """ + return request, metadata + + def post_get_backup(self, response: backup.Backup) -> backup.Backup: + """Post-rpc interceptor for get_backup + + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. + """ + return response + + def post_get_backup_with_metadata(self, response: backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + + def pre_get_backup_schedule(self, request: backup_schedule.GetBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.GetBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_backup_schedule(self, response: backup_schedule.BackupSchedule) -> backup_schedule.BackupSchedule: + """Post-rpc interceptor for get_backup_schedule + + DEPRECATED. Please use the `post_get_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_backup_schedule` interceptor runs + before the `post_get_backup_schedule_with_metadata` interceptor. + """ + return response + + def post_get_backup_schedule_with_metadata(self, response: backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_schedule_with_metadata` + interceptor in new development instead of the `post_get_backup_schedule` interceptor. + When both interceptors are used, this `post_get_backup_schedule_with_metadata` interceptor runs after the + `post_get_backup_schedule` interceptor. The (possibly modified) response returned by + `post_get_backup_schedule` will be passed to + `post_get_backup_schedule_with_metadata`. 
+ """ + return response, metadata + + def pre_get_database(self, request: spanner_database_admin.GetDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_database(self, response: spanner_database_admin.Database) -> spanner_database_admin.Database: + """Post-rpc interceptor for get_database + + DEPRECATED. Please use the `post_get_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_database` interceptor runs + before the `post_get_database_with_metadata` interceptor. + """ + return response + + def post_get_database_with_metadata(self, response: spanner_database_admin.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_database_with_metadata` + interceptor in new development instead of the `post_get_database` interceptor. + When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the + `post_get_database` interceptor. The (possibly modified) response returned by + `post_get_database` will be passed to + `post_get_database_with_metadata`. + """ + return response, metadata + + def pre_get_database_ddl(self, request: spanner_database_admin.GetDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_database_ddl + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_database_ddl(self, response: spanner_database_admin.GetDatabaseDdlResponse) -> spanner_database_admin.GetDatabaseDdlResponse: + """Post-rpc interceptor for get_database_ddl + + DEPRECATED. Please use the `post_get_database_ddl_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_database_ddl` interceptor runs + before the `post_get_database_ddl_with_metadata` interceptor. + """ + return response + + def post_get_database_ddl_with_metadata(self, response: spanner_database_admin.GetDatabaseDdlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_database_ddl + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_database_ddl_with_metadata` + interceptor in new development instead of the `post_get_database_ddl` interceptor. 
+ When both interceptors are used, this `post_get_database_ddl_with_metadata` interceptor runs after the + `post_get_database_ddl` interceptor. The (possibly modified) response returned by + `post_get_database_ddl` will be passed to + `post_get_database_ddl_with_metadata`. + """ + return response, metadata + + def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_list_backup_operations(self, request: backup.ListBackupOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_backup_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_backup_operations(self, response: backup.ListBackupOperationsResponse) -> backup.ListBackupOperationsResponse: + """Post-rpc interceptor for list_backup_operations + + DEPRECATED. Please use the `post_list_backup_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_backup_operations` interceptor runs + before the `post_list_backup_operations_with_metadata` interceptor. + """ + return response + + def post_list_backup_operations_with_metadata(self, response: backup.ListBackupOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backup_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_list_backup_operations_with_metadata` + interceptor in new development instead of the `post_list_backup_operations` interceptor. + When both interceptors are used, this `post_list_backup_operations_with_metadata` interceptor runs after the + `post_list_backup_operations` interceptor. The (possibly modified) response returned by + `post_list_backup_operations` will be passed to + `post_list_backup_operations_with_metadata`. + """ + return response, metadata + + def pre_list_backups(self, request: backup.ListBackupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_backups + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_backups(self, response: backup.ListBackupsResponse) -> backup.ListBackupsResponse: + """Post-rpc interceptor for list_backups + + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. + """ + return response + + def post_list_backups_with_metadata(self, response: backup.ListBackupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + + def pre_list_backup_schedules(self, request: backup_schedule.ListBackupSchedulesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_backup_schedules + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_backup_schedules(self, response: backup_schedule.ListBackupSchedulesResponse) -> backup_schedule.ListBackupSchedulesResponse: + """Post-rpc interceptor for list_backup_schedules + + DEPRECATED. Please use the `post_list_backup_schedules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_backup_schedules` interceptor runs + before the `post_list_backup_schedules_with_metadata` interceptor. 
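+
+        When both hooks are overridden they run in the order described above;
+        an illustrative sketch:
+
+        .. code-block:: python
+
+            class LoggingInterceptor(DatabaseAdminRestInterceptor):
+                def post_list_backup_schedules(self, response):
+                    # Runs first; kept only for backward compatibility.
+                    return response
+
+                def post_list_backup_schedules_with_metadata(self, response, metadata):
+                    # Runs second, receiving the response returned above.
+                    return response, metadata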
+ """ + return response + + def post_list_backup_schedules_with_metadata(self, response: backup_schedule.ListBackupSchedulesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backup_schedules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backup_schedules_with_metadata` + interceptor in new development instead of the `post_list_backup_schedules` interceptor. + When both interceptors are used, this `post_list_backup_schedules_with_metadata` interceptor runs after the + `post_list_backup_schedules` interceptor. The (possibly modified) response returned by + `post_list_backup_schedules` will be passed to + `post_list_backup_schedules_with_metadata`. + """ + return response, metadata + + def pre_list_database_operations(self, request: spanner_database_admin.ListDatabaseOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_database_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_database_operations(self, response: spanner_database_admin.ListDatabaseOperationsResponse) -> spanner_database_admin.ListDatabaseOperationsResponse: + """Post-rpc interceptor for list_database_operations + + DEPRECATED. Please use the `post_list_database_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_database_operations` interceptor runs + before the `post_list_database_operations_with_metadata` interceptor. + """ + return response + + def post_list_database_operations_with_metadata(self, response: spanner_database_admin.ListDatabaseOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_database_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_database_operations_with_metadata` + interceptor in new development instead of the `post_list_database_operations` interceptor. + When both interceptors are used, this `post_list_database_operations_with_metadata` interceptor runs after the + `post_list_database_operations` interceptor. The (possibly modified) response returned by + `post_list_database_operations` will be passed to + `post_list_database_operations_with_metadata`. + """ + return response, metadata + + def pre_list_database_roles(self, request: spanner_database_admin.ListDatabaseRolesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_database_roles + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. 
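+
+        For example, a subclass could rewrite fields on the request before it
+        is sent (an illustrative sketch; the page-size cap is arbitrary):
+
+        .. code-block:: python
+
+            class LoggingInterceptor(DatabaseAdminRestInterceptor):
+                def pre_list_database_roles(self, request, metadata):
+                    # Cap the page size requested from the server.
+                    request.page_size = min(request.page_size or 100, 100)
+                    return request, metadata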
+ """ + return request, metadata + + def post_list_database_roles(self, response: spanner_database_admin.ListDatabaseRolesResponse) -> spanner_database_admin.ListDatabaseRolesResponse: + """Post-rpc interceptor for list_database_roles + + DEPRECATED. Please use the `post_list_database_roles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_database_roles` interceptor runs + before the `post_list_database_roles_with_metadata` interceptor. + """ + return response + + def post_list_database_roles_with_metadata(self, response: spanner_database_admin.ListDatabaseRolesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_database_roles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_database_roles_with_metadata` + interceptor in new development instead of the `post_list_database_roles` interceptor. + When both interceptors are used, this `post_list_database_roles_with_metadata` interceptor runs after the + `post_list_database_roles` interceptor. The (possibly modified) response returned by + `post_list_database_roles` will be passed to + `post_list_database_roles_with_metadata`. + """ + return response, metadata + + def pre_list_databases(self, request: spanner_database_admin.ListDatabasesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_databases + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_databases(self, response: spanner_database_admin.ListDatabasesResponse) -> spanner_database_admin.ListDatabasesResponse: + """Post-rpc interceptor for list_databases + + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. + """ + return response + + def post_list_databases_with_metadata(self, response: spanner_database_admin.ListDatabasesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. 
+ """ + return response, metadata + + def pre_restore_database(self, request: spanner_database_admin.RestoreDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.RestoreDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for restore_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_restore_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for restore_database + + DEPRECATED. Please use the `post_restore_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_restore_database` interceptor runs + before the `post_restore_database_with_metadata` interceptor. + """ + return response + + def post_restore_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_restore_database_with_metadata` + interceptor in new development instead of the `post_restore_database` interceptor. + When both interceptors are used, this `post_restore_database_with_metadata` interceptor runs after the + `post_restore_database` interceptor. The (possibly modified) response returned by + `post_restore_database` will be passed to + `post_restore_database_with_metadata`. + """ + return response, metadata + + def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. 
The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + def pre_update_backup(self, request: gsad_backup.UpdateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.UpdateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_backup + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_backup(self, response: gsad_backup.Backup) -> gsad_backup.Backup: + """Post-rpc interceptor for update_backup + + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. + """ + return response + + def post_update_backup_with_metadata(self, response: gsad_backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. + """ + return response, metadata + + def pre_update_backup_schedule(self, request: gsad_backup_schedule.UpdateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.UpdateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_backup_schedule + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: + """Post-rpc interceptor for update_backup_schedule + + DEPRECATED. Please use the `post_update_backup_schedule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_backup_schedule` interceptor runs + before the `post_update_backup_schedule_with_metadata` interceptor. + """ + return response + + def post_update_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup_schedule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_update_backup_schedule_with_metadata` + interceptor in new development instead of the `post_update_backup_schedule` interceptor. + When both interceptors are used, this `post_update_backup_schedule_with_metadata` interceptor runs after the + `post_update_backup_schedule` interceptor. The (possibly modified) response returned by + `post_update_backup_schedule` will be passed to + `post_update_backup_schedule_with_metadata`. + """ + return response, metadata + + def pre_update_database(self, request: spanner_database_admin.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_database + + DEPRECATED. Please use the `post_update_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_database` interceptor runs + before the `post_update_database_with_metadata` interceptor. 
+ """ + return response + + def post_update_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_update_database_with_metadata` + interceptor in new development instead of the `post_update_database` interceptor. + When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the + `post_update_database` interceptor. The (possibly modified) response returned by + `post_update_database` will be passed to + `post_update_database_with_metadata`. + """ + return response, metadata + + def pre_update_database_ddl(self, request: spanner_database_admin.UpdateDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_database_ddl + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_update_database_ddl(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_database_ddl + + DEPRECATED. Please use the `post_update_database_ddl_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. This `post_update_database_ddl` interceptor runs + before the `post_update_database_ddl_with_metadata` interceptor. + """ + return response + + def post_update_database_ddl_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_database_ddl + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DatabaseAdmin server but before it is returned to user code. + + We recommend only using this `post_update_database_ddl_with_metadata` + interceptor in new development instead of the `post_update_database_ddl` interceptor. + When both interceptors are used, this `post_update_database_ddl_with_metadata` interceptor runs after the + `post_update_database_ddl` interceptor. The (possibly modified) response returned by + `post_update_database_ddl` will be passed to + `post_update_database_ddl_with_metadata`. + """ + return response, metadata + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. 
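+
+        An illustrative sketch (``cancel_operation`` has no response body, so
+        the hook can only observe the call):
+
+        .. code-block:: python
+
+            class LoggingInterceptor(DatabaseAdminRestInterceptor):
+                def post_cancel_operation(self, response):
+                    # ``response`` is always ``None`` here.
+                    return response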
+ """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DatabaseAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DatabaseAdmin server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DatabaseAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DatabaseAdminRestInterceptor + + +class DatabaseAdminRestTransport(_BaseDatabaseAdminRestTransport): + """REST backend synchronous transport for DatabaseAdmin. + + Cloud Spanner Database Admin API + + The Cloud Spanner Database Admin API can be used to: + + - create, drop, and list databases + - update the schema of pre-existing databases + - create, delete, copy and list backups for a database + - restore a database from an existing backup + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
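+
+    A custom interceptor can be supplied when the transport is constructed,
+    for example (an illustrative sketch; ``LoggingInterceptor`` stands in for
+    any ``DatabaseAdminRestInterceptor`` subclass):
+
+    .. code-block:: python
+
+        transport = DatabaseAdminRestTransport(
+            interceptor=LoggingInterceptor(),
+        )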
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[DatabaseAdminRestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure mutual TLS HTTP channel. It is ignored
+                if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            url_scheme=url_scheme,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or DatabaseAdminRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
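+        # The http_options below route the generic google.longrunning
+        # Operations methods onto Spanner's REST paths for database,
+        # instance, backup, and instance-config operations.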
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _AddSplitPoints(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.AddSplitPoints") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_database_admin.AddSplitPointsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.AddSplitPointsResponse: + r"""Call the add split points method over HTTP. + + Args: + request (~.spanner_database_admin.AddSplitPointsRequest): + The request object. The request for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.spanner_database_admin.AddSplitPointsResponse:
+                    The response for
+                    [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints].
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_http_options()
+
+            request, metadata = self._interceptor.pre_add_split_points(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_transcoded_request(http_options, request)
+
+            body = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except Exception:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    "Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.AddSplitPoints",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "AddSplitPoints",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._AddSplitPoints._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = spanner_database_admin.AddSplitPointsResponse()
+            pb_resp = spanner_database_admin.AddSplitPointsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_add_split_points(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_add_split_points_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = spanner_database_admin.AddSplitPointsResponse.to_json(resp)
+                except Exception:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.add_split_points",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "AddSplitPoints",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _CopyBackup(_BaseDatabaseAdminRestTransport._BaseCopyBackup, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.CopyBackup")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: backup.CopyBackupRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the copy backup method over HTTP.
+
+            Args:
+                request (~.backup.CopyBackupRequest):
+                    The request object. The request for
+                    [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_http_options()
+
+            request, metadata = self._interceptor.pre_copy_backup(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_transcoded_request(http_options, request)
+
+            body = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except Exception:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    "Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CopyBackup",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "CopyBackup",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._CopyBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = operations_pb2.Operation()
+            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_copy_backup(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_copy_backup_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = json_format.MessageToJson(resp)
+                except Exception:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.copy_backup",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "CopyBackup",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _CreateBackup(_BaseDatabaseAdminRestTransport._BaseCreateBackup, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.CreateBackup")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: gsad_backup.CreateBackupRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.Operation:
+            r"""Call the create backup method over HTTP.
+
+            Args:
+                request (~.gsad_backup.CreateBackupRequest):
+                    The request object. The request for
+                    [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.operations_pb2.Operation:
+                    This resource represents a
+                    long-running operation that is the
+                    result of a network API call.
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_http_options()
+
+            request, metadata = self._interceptor.pre_create_backup(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_transcoded_request(http_options, request)
+
+            body = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except Exception:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    "Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackup",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "CreateBackup",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._CreateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CreateBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.CreateBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gsad_backup_schedule.CreateBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Call the create backup schedule method over HTTP. + + Args: + request (~.gsad_backup_schedule.CreateBackupScheduleRequest): + The request object. The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gsad_backup_schedule.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. 
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_http_options()
+
+            request, metadata = self._interceptor.pre_create_backup_schedule(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_transcoded_request(http_options, request)
+
+            body = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackupSchedule",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "CreateBackupSchedule",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._CreateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gsad_backup_schedule.BackupSchedule()
+            pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_create_backup_schedule(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_create_backup_schedule_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = gsad_backup_schedule.BackupSchedule.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup_schedule",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "CreateBackupSchedule",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _CreateDatabase(_BaseDatabaseAdminRestTransport._BaseCreateDatabase, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.CreateDatabase")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+ request: spanner_database_admin.CreateDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create database method over HTTP. + + Args: + request (~.spanner_database_admin.CreateDatabaseRequest): + The request object. The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_http_options() + + request, metadata = self._interceptor.pre_create_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CreateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._CreateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
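+            # The operations_pb2.Operation parsed below is the raw long-running
+            # operation proto; the higher-level GAPIC client typically wraps it in
+            # an operation future (e.g. google.api_core.operation.from_gapic with
+            # result type spanner_database_admin.Database) that callers poll.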
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CreateDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteBackup(_BaseDatabaseAdminRestTransport._BaseDeleteBackup, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DeleteBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup.DeleteBackupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete backup method over HTTP. + + Args: + request (~.backup.DeleteBackupRequest): + The request object. The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_http_options() + + request, metadata = self._interceptor.pre_delete_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DeleteBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DeleteBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteBackupSchedule(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DeleteBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup_schedule.DeleteBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete backup schedule method over HTTP. + + Args: + request (~.backup_schedule.DeleteBackupScheduleRequest): + The request object. The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_http_options() + + request, metadata = self._interceptor.pre_delete_backup_schedule(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackupSchedule", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DeleteBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DeleteBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DropDatabase(_BaseDatabaseAdminRestTransport._BaseDropDatabase, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DropDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.DropDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the drop database method over HTTP. + + Args: + request (~.spanner_database_admin.DropDatabaseRequest): + The request object. The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_http_options() + + request, metadata = self._interceptor.pre_drop_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DropDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DropDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DropDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetBackup(_BaseDatabaseAdminRestTransport._BaseGetBackup, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup.GetBackupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup.Backup: + r"""Call the get backup method over HTTP. + + Args: + request (~.backup.GetBackupRequest): + The request object. The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backup.Backup: + A backup of a Cloud Spanner database. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_http_options() + + request, metadata = self._interceptor.pre_get_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup.Backup() + pb_resp = backup.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup.Backup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetBackupSchedule(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup_schedule.GetBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup_schedule.BackupSchedule: + r"""Call the get backup schedule method over HTTP. 
+
+            Args:
+                request (~.backup_schedule.GetBackupScheduleRequest):
+                    The request object. The request for
+                    [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.backup_schedule.BackupSchedule:
+                    BackupSchedule expresses the
+                automated backup creation specification
+                for a Spanner database.
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_http_options()
+
+            request, metadata = self._interceptor.pre_get_backup_schedule(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackupSchedule",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetBackupSchedule",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._GetBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
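+            # Parsing detail: BackupSchedule is a proto-plus wrapper, so the code
+            # below parses the HTTP body into BackupSchedule.pb(resp) -- the
+            # underlying protobuf message -- which fills `resp` in place;
+            # ignore_unknown_fields=True keeps this client tolerant of fields
+            # added to the API after this code was generated.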
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = backup_schedule.BackupSchedule()
+            pb_resp = backup_schedule.BackupSchedule.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_backup_schedule(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_backup_schedule_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = backup_schedule.BackupSchedule.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup_schedule",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetBackupSchedule",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _GetDatabase(_BaseDatabaseAdminRestTransport._BaseGetDatabase, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.GetDatabase")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: spanner_database_admin.GetDatabaseRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> spanner_database_admin.Database:
+            r"""Call the get database method over HTTP.
+
+            Args:
+                request (~.spanner_database_admin.GetDatabaseRequest):
+                    The request object. The request for
+                    [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.spanner_database_admin.Database:
+                    A Cloud Spanner database.
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_http_options() + + request, metadata = self._interceptor.pre_get_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_database_admin.Database() + pb_resp = spanner_database_admin.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_database_admin.Database.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetDatabaseDdl") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.GetDatabaseDdlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.GetDatabaseDdlResponse: + 
r"""Call the get database ddl method over HTTP. + + Args: + request (~.spanner_database_admin.GetDatabaseDdlRequest): + The request object. The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.GetDatabaseDdlResponse: + The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_http_options() + + request, metadata = self._interceptor.pre_get_database_ddl(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabaseDdl", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetDatabaseDdl", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._GetDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = spanner_database_admin.GetDatabaseDdlResponse()
+            pb_resp = spanner_database_admin.GetDatabaseDdlResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_get_database_ddl(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_get_database_ddl_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = spanner_database_admin.GetDatabaseDdlResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database_ddl",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetDatabaseDdl",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _GetIamPolicy(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.GetIamPolicy")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+                )
+            return response
+
+        def __call__(self,
+                request: iam_policy_pb2.GetIamPolicyRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> policy_pb2.Policy:
+            r"""Call the get iam policy method over HTTP.
+
+            Args:
+                request (~.iam_policy_pb2.GetIamPolicyRequest):
+                    The request object. Request message for ``GetIamPolicy`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.policy_pb2.Policy:
+                    An Identity and Access Management (IAM) policy, which
+                specifies access controls for Google Cloud resources.
+
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members``, or
+                principals, to a single ``role``. Principals can be user
+                accounts, service accounts, Google groups, and domains
+                (such as G Suite). A ``role`` is a named list of
+                permissions; each ``role`` can be an IAM predefined role
+                or a user-created custom role.
+
+                For some types of Google Cloud resources, a ``binding``
+                can also specify a ``condition``, which is a logical
+                expression that allows access to a resource only if the
+                expression evaluates to ``true``. A condition can add
+                constraints based on attributes of the request, the
+                resource, or both.
+                To learn which resources support
+                conditions in their IAM policies, see the `IAM
+                documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+
+                **JSON example:**
+
+                ::
+
+                       {
+                         "bindings": [
+                           {
+                             "role": "roles/resourcemanager.organizationAdmin",
+                             "members": [
+                               "user:mike@example.com",
+                               "group:admins@example.com",
+                               "domain:google.com",
+                               "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                             ]
+                           },
+                           {
+                             "role": "roles/resourcemanager.organizationViewer",
+                             "members": [
+                               "user:eve@example.com"
+                             ],
+                             "condition": {
+                               "title": "expirable access",
+                               "description": "Does not grant access after Sep 2020",
+                               "expression": "request.time <
+                               timestamp('2020-10-01T00:00:00.000Z')",
+                             }
+                           }
+                         ],
+                         "etag": "BwWWja0YfJA=",
+                         "version": 3
+                       }
+
+                **YAML example:**
+
+                ::
+
+                       bindings:
+                       - members:
+                         - user:mike@example.com
+                         - group:admins@example.com
+                         - domain:google.com
+                         - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                         role: roles/resourcemanager.organizationAdmin
+                       - members:
+                         - user:eve@example.com
+                         role: roles/resourcemanager.organizationViewer
+                         condition:
+                           title: expirable access
+                           description: Does not grant access after Sep 2020
+                           expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                       etag: BwWWja0YfJA=
+                       version: 3
+
+                For a description of IAM and its features, see the `IAM
+                documentation <https://cloud.google.com/iam/docs/>`__.
+
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_http_options()
+
+            request, metadata = self._interceptor.pre_get_iam_policy(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request)
+
+            body = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = json_format.MessageToJson(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetIamPolicy",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetIamPolicy",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
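+            # Unlike the proto-plus admin types, policy_pb2.Policy is a plain
+            # protobuf message, so the code below parses and serializes it
+            # directly with json_format instead of unwrapping via a .pb() helper.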
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_iam_policy", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _InternalUpdateGraphOperation(_BaseDatabaseAdminRestTransport._BaseInternalUpdateGraphOperation, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.InternalUpdateGraphOperation") + + def __call__(self, + request: spanner_database_admin.InternalUpdateGraphOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: + raise NotImplementedError( + "Method InternalUpdateGraphOperation is not available over REST transport" + ) + class _ListBackupOperations(_BaseDatabaseAdminRestTransport._BaseListBackupOperations, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListBackupOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup.ListBackupOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup.ListBackupOperationsResponse: + r"""Call the list backup operations method over HTTP. + + Args: + request (~.backup.ListBackupOperationsRequest): + The request object. The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backup.ListBackupOperationsResponse: + The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. 
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_backup_operations(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupOperations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListBackupOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup.ListBackupOperationsResponse() + pb_resp = backup.ListBackupOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_operations_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup.ListBackupOperationsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_operations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackups(_BaseDatabaseAdminRestTransport._BaseListBackups, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListBackups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: backup.ListBackupsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> backup.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backup.ListBackupsRequest): + The request object. The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backup.ListBackupsResponse: + The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListBackups._get_http_options() + + request, metadata = self._interceptor.pre_list_backups(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackups._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListBackups._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackups", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListBackups._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
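+            # Pagination note: this transport call returns a single page; paging
+            # via next_page_token happens one layer up in the pager classes from
+            # services/database_admin/pagers.py (e.g. a ListBackupsPager -- name
+            # assumed here -- which re-invokes this method for each page).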
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = backup.ListBackupsResponse()
+            pb_resp = backup.ListBackupsResponse.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_list_backups(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_list_backups_with_metadata(resp, response_metadata)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = backup.ListBackupsResponse.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backups",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "ListBackups",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _ListBackupSchedules(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.ListBackupSchedules")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: backup_schedule.ListBackupSchedulesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> backup_schedule.ListBackupSchedulesResponse:
+            r"""Call the list backup schedules method over HTTP.
+
+            Args:
+                request (~.backup_schedule.ListBackupSchedulesRequest):
+                    The request object. The request for
+                    [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.backup_schedule.ListBackupSchedulesResponse:
+                    The response for
+                [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_http_options() + + request, metadata = self._interceptor.pre_list_backup_schedules(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupSchedules", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupSchedules", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListBackupSchedules._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backup_schedule.ListBackupSchedulesResponse() + pb_resp = backup_schedule.ListBackupSchedulesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_schedules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_schedules_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = backup_schedule.ListBackupSchedulesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_schedules", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListBackupSchedules", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDatabaseOperations(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListDatabaseOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.ListDatabaseOperationsRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.ListDatabaseOperationsResponse: + r"""Call the list database operations method over HTTP. + + Args: + request (~.spanner_database_admin.ListDatabaseOperationsRequest): + The request object. The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.ListDatabaseOperationsResponse: + The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_database_operations(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseOperations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabaseOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListDatabaseOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_database_admin.ListDatabaseOperationsResponse() + pb_resp = spanner_database_admin.ListDatabaseOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_database_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_database_operations_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_database_admin.ListDatabaseOperationsResponse.to_json(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_operations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabaseOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDatabaseRoles(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListDatabaseRoles") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.ListDatabaseRolesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.ListDatabaseRolesResponse: + r"""Call the list database roles method over HTTP. + + Args: + request (~.spanner_database_admin.ListDatabaseRolesRequest): + The request object. The request for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.ListDatabaseRolesResponse: + The response for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles].
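+
+            Example:
+                An illustrative sketch (not generator output), assuming Application
+                Default Credentials; the resource names are placeholders. Iterating
+                ``.pages`` shows the page-level view of the same pager::
+
+                    from google.cloud import spanner_admin_database_v1
+
+                    client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
+                    parent = "projects/my-project/instances/my-instance/databases/my-database"
+                    for page in client.list_database_roles(parent=parent).pages:
+                        for role in page.database_roles:
+                            print(role.name)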
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_http_options() + + request, metadata = self._interceptor.pre_list_database_roles(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseRoles", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabaseRoles", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListDatabaseRoles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_database_admin.ListDatabaseRolesResponse() + pb_resp = spanner_database_admin.ListDatabaseRolesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_database_roles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_database_roles_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_database_admin.ListDatabaseRolesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_roles", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabaseRoles", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDatabases(_BaseDatabaseAdminRestTransport._BaseListDatabases, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.ListDatabases") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_database_admin.ListDatabasesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, 
+ metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_database_admin.ListDatabasesResponse: + r"""Call the list databases method over HTTP. + + Args: + request (~.spanner_database_admin.ListDatabasesRequest): + The request object. The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_database_admin.ListDatabasesResponse: + The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_http_options() + + request, metadata = self._interceptor.pre_list_databases(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabases", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabases", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListDatabases._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_database_admin.ListDatabasesResponse() + pb_resp = spanner_database_admin.ListDatabasesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_databases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_databases_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_database_admin.ListDatabasesResponse.to_json(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_databases", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListDatabases", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _RestoreDatabase(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.RestoreDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_database_admin.RestoreDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the restore database method over HTTP. + + Args: + request (~.spanner_database_admin.RestoreDatabaseRequest): + The request object. The request for + [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call.
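+
+            Example:
+                An illustrative sketch (not generator output); the resource names and
+                timeout are placeholders. The returned ``Operation`` wraps the
+                long-running restore, and ``result()`` blocks until it completes::
+
+                    from google.cloud import spanner_admin_database_v1
+
+                    client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
+                    operation = client.restore_database(
+                        parent="projects/my-project/instances/my-instance",
+                        database_id="restored-db",
+                        backup="projects/my-project/instances/my-instance/backups/my-backup",
+                    )
+                    # Blocks until the restore finishes; returns the new Database message.
+                    database = operation.result(timeout=300)
+                    print(database.name)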
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_http_options() + + request, metadata = self._interceptor.pre_restore_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.RestoreDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "RestoreDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._RestoreDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_restore_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.restore_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "RestoreDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation <https://cloud.google.com/iam/docs/>`__.
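+
+            Example:
+                An illustrative sketch (not generator output); the role, member, and
+                resource name are placeholders. It builds a ``Policy`` with one
+                binding and writes it back::
+
+                    from google.cloud import spanner_admin_database_v1
+                    from google.iam.v1 import iam_policy_pb2, policy_pb2
+
+                    client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
+                    policy = policy_pb2.Policy(bindings=[
+                        policy_pb2.Binding(
+                            role="roles/spanner.databaseReader",
+                            members=["user:mike@example.com"],
+                        ),
+                    ])
+                    request = iam_policy_pb2.SetIamPolicyRequest(
+                        resource="projects/my-project/instances/my-instance/databases/my-database",
+                        policy=policy,
+                    )
+                    new_policy = client.set_iam_policy(request=request)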
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.SetIamPolicy", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.set_iam_policy", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.TestIamPermissions", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.test_iam_permissions", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateBackup(_BaseDatabaseAdminRestTransport._BaseUpdateBackup, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.UpdateBackup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gsad_backup.UpdateBackupRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> gsad_backup.Backup: + r"""Call the update backup method over HTTP. + + Args: + request (~.gsad_backup.UpdateBackupRequest): + The request object. The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gsad_backup.Backup: + A backup of a Cloud Spanner database. 
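+
+            Example:
+                An illustrative sketch (not generator output); the backup name and new
+                expiry are placeholders. Only the fields named in ``update_mask``
+                are modified (here, ``expire_time``)::
+
+                    import datetime
+
+                    from google.cloud import spanner_admin_database_v1
+                    from google.protobuf import field_mask_pb2
+
+                    client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
+                    backup = spanner_admin_database_v1.Backup(
+                        name="projects/my-project/instances/my-instance/backups/my-backup",
+                    )
+                    # proto-plus accepts a timezone-aware datetime for Timestamp fields.
+                    backup.expire_time = datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc)
+                    updated = client.update_backup(
+                        backup=backup,
+                        update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
+                    )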
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_http_options() + + request, metadata = self._interceptor.pre_update_backup(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateBackup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsad_backup.Backup() + pb_resp = gsad_backup.Backup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = gsad_backup.Backup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateBackup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.UpdateBackupSchedule") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: gsad_backup_schedule.UpdateBackupScheduleRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> gsad_backup_schedule.BackupSchedule: + r"""Call the update backup schedule method over HTTP. + + Args: + request (~.gsad_backup_schedule.UpdateBackupScheduleRequest): + The request object. The request for + [UpdateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gsad_backup_schedule.BackupSchedule: + BackupSchedule expresses the + automated backup creation specification + for a Spanner database. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_http_options() + + request, metadata = self._interceptor.pre_update_backup_schedule(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackupSchedule", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateBackupSchedule", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gsad_backup_schedule.BackupSchedule() + pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_backup_schedule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_schedule_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = gsad_backup_schedule.BackupSchedule.to_json(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup_schedule", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateBackupSchedule", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDatabase(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.UpdateDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_database_admin.UpdateDatabaseRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update database method over HTTP. + + Args: + request (~.spanner_database_admin.UpdateDatabaseRequest): + The request object. The request for + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call.
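+
+            Example:
+                An illustrative sketch (not generator output); the database name is a
+                placeholder. It enables drop protection and waits for the
+                long-running operation to finish::
+
+                    from google.cloud import spanner_admin_database_v1
+                    from google.protobuf import field_mask_pb2
+
+                    client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
+                    database = spanner_admin_database_v1.Database(
+                        name="projects/my-project/instances/my-instance/databases/my-database",
+                        enable_drop_protection=True,
+                    )
+                    operation = client.update_database(
+                        database=database,
+                        update_mask=field_mask_pb2.FieldMask(paths=["enable_drop_protection"]),
+                    )
+                    operation.result(timeout=300)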
+ + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_http_options() + + request, metadata = self._interceptor.pre_update_database(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabase", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_database_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.UpdateDatabaseDdl") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_database_admin.UpdateDatabaseDdlRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update database ddl method over HTTP. + + Args: + request (~.spanner_database_admin.UpdateDatabaseDdlRequest): + The request object. Enqueues the given DDL statements to be applied, in + order but not necessarily all at once, to the database + schema at some point (or points) in the future. The + server checks that the statements are executable + (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon + later execution (e.g., if a statement from another batch + of statements is applied first and it conflicts in some + way, or if there is some data-related problem like a + ``NULL`` value in a column to which ``NOT NULL`` would + be added). If a statement fails, all subsequent + statements in the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be + used with the + [Operations][google.longrunning.Operations] API to + monitor progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_http_options() + + request, metadata = self._interceptor.pre_update_database_ddl(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_transcoded_request(http_options, request) + + body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabaseDdl", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabaseDdl", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._UpdateDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_database_ddl(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_database_ddl_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database_ddl", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "UpdateDatabaseDdl", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def add_split_points(self) -> Callable[ + [spanner_database_admin.AddSplitPointsRequest], + spanner_database_admin.AddSplitPointsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AddSplitPoints(self._session, self._host, self._interceptor) # type: ignore + + @property + def copy_backup(self) -> Callable[ + [backup.CopyBackupRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CopyBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup(self) -> Callable[ + [gsad_backup.CreateBackupRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.CreateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_database(self) -> Callable[ + [spanner_database_admin.CreateDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup(self) -> Callable[ + [backup.DeleteBackupRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_backup_schedule(self) -> Callable[ + [backup_schedule.DeleteBackupScheduleRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def drop_database(self) -> Callable[ + [spanner_database_admin.DropDatabaseRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DropDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup(self) -> Callable[ + [backup.GetBackupRequest], + backup.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_backup_schedule(self) -> Callable[ + [backup_schedule.GetBackupScheduleRequest], + backup_schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_database(self) -> Callable[ + [spanner_database_admin.GetDatabaseRequest], + spanner_database_admin.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_database_ddl(self) -> Callable[ + [spanner_database_admin.GetDatabaseDdlRequest], + spanner_database_admin.GetDatabaseDdlResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def internal_update_graph_operation(self) -> Callable[ + [spanner_database_admin.InternalUpdateGraphOperationRequest], + spanner_database_admin.InternalUpdateGraphOperationResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._InternalUpdateGraphOperation(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_operations(self) -> Callable[ + [backup.ListBackupOperationsRequest], + backup.ListBackupOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backups(self) -> Callable[ + [backup.ListBackupsRequest], + backup.ListBackupsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_backup_schedules(self) -> Callable[ + [backup_schedule.ListBackupSchedulesRequest], + backup_schedule.ListBackupSchedulesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_database_operations(self) -> Callable[ + [spanner_database_admin.ListDatabaseOperationsRequest], + spanner_database_admin.ListDatabaseOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabaseOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_database_roles(self) -> Callable[ + [spanner_database_admin.ListDatabaseRolesRequest], + spanner_database_admin.ListDatabaseRolesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabaseRoles(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_databases(self) -> Callable[ + [spanner_database_admin.ListDatabasesRequest], + spanner_database_admin.ListDatabasesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore + + @property + def restore_database(self) -> Callable[ + [spanner_database_admin.RestoreDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RestoreDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup(self) -> Callable[ + [gsad_backup.UpdateBackupRequest], + gsad_backup.Backup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_schedule(self) -> Callable[ + [gsad_backup_schedule.UpdateBackupScheduleRequest], + gsad_backup_schedule.BackupSchedule]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_database(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_database_ddl(self) -> Callable[ + [spanner_database_admin.UpdateDatabaseDdlRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseDatabaseAdminRestTransport._BaseCancelOperation, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
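+
+            Example:
+                An illustrative sketch (not generator output); the operation name is a
+                placeholder. Cancellation is best-effort and returns no payload::
+
+                    from google.cloud import spanner_admin_database_v1
+                    from google.longrunning import operations_pb2
+
+                    client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
+                    client.cancel_operation(
+                        operations_pb2.CancelOperationRequest(
+                            name="projects/my-project/instances/my-instance/databases/my-database/operations/op-id",
+                        )
+                    )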
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CancelOperation", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseDatabaseAdminRestTransport._BaseDeleteOperation, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteOperation", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseDatabaseAdminRestTransport._BaseGetOperation, DatabaseAdminRestStub): + def __hash__(self): + return hash("DatabaseAdminRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+            """
+
+            http_options = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_http_options()
+
+            request, metadata = self._interceptor.pre_get_operation(request, metadata)
+            transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)
+
+            # Jsonify the query params
+            query_params = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
+                method = transcoded_request['method']
+                try:
+                    request_payload = json_format.MessageToJson(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetOperation",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetOperation",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = DatabaseAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            content = response.content.decode("utf-8")
+            resp = operations_pb2.Operation()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_get_operation(resp)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = json_format.MessageToJson(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.GetOperation",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "GetOperation",
+                        "httpResponse": http_response,
+                        "metadata": http_response["headers"],
+                    },
+                )
+            return resp
+
+    @property
+    def list_operations(self):
+        return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore
+
+    class _ListOperations(_BaseDatabaseAdminRestTransport._BaseListOperations, DatabaseAdminRestStub):
+        def __hash__(self):
+            return hash("DatabaseAdminRestTransport.ListOperations")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None):
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+            return response
+
+        def __call__(self,
+                request: operations_pb2.ListOperationsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
+                ) -> operations_pb2.ListOperationsResponse:
+
+            r"""Call the list operations method over HTTP.
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseDatabaseAdminRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDatabaseAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseDatabaseAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListOperations", + extra = { + "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DatabaseAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
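+            # (for example, an HTTP 404 from the service is raised as
+            # google.api_core.exceptions.NotFound)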
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            content = response.content.decode("utf-8")
+            resp = operations_pb2.ListOperationsResponse()
+            resp = json_format.Parse(content, resp)
+            resp = self._interceptor.post_list_operations(resp)
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
+                try:
+                    response_payload = json_format.MessageToJson(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.ListOperations",
+                    extra = {
+                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
+                        "rpcName": "ListOperations",
+                        "httpResponse": http_response,
+                        "metadata": http_response["headers"],
+                    },
+                )
+            return resp
+
+    @property
+    def kind(self) -> str:
+        return "rest"
+
+    def close(self):
+        self._session.close()
+
+
+__all__=(
+    'DatabaseAdminRestTransport',
+)
diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py
new file mode 100644
index 0000000000..0cfaa08199
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py
@@ -0,0 +1,1378 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json  # type: ignore
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO
+
+import re
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+
+
+from google.cloud.spanner_admin_database_v1.types import backup
+from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
+from google.cloud.spanner_admin_database_v1.types import backup_schedule
+from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule
+from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+
+
+class _BaseDatabaseAdminRestTransport(DatabaseAdminTransport):
+    """Base REST backend transport for DatabaseAdmin.
+
+    Note: This class is not meant to be used directly. Use its sync and
+    async sub-classes instead.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
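+    # Each RPC gets a _Base<MethodName> inner class below whose static helpers
+    # turn a proto request into the pieces of an HTTP call. Roughly, for a
+    # hypothetical GetDatabase request whose name is
+    # "projects/my-project/instances/my-instance/databases/my-db":
+    #
+    #   options = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_http_options()
+    #   transcoded = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_transcoded_request(options, request)
+    #   # transcoded now holds roughly:
+    #   #   {'method': 'get',
+    #   #    'uri': '/v1/projects/my-project/instances/my-instance/databases/my-db',
+    #   #    'query_params': <request fields not bound into the URI>}
+    #
+    # The concrete sync and async transports then combine "{host}{uri}" with
+    # the JSON body and query params produced by the _get_request_body_json
+    # and _get_query_params_json helpers to issue the HTTP request.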
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[Any] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+
+    class _BaseAddSplitPoints:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [{
+                'method': 'post',
+                'uri': '/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints',
+                'body': '*',
+            },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = spanner_database_admin.AddSplitPointsRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request['body'],
+                use_integers_for_enums=True
+            )
+            return body
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                use_integers_for_enums=True,
+            ))
+            query_params.update(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseCopyBackup:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/backups:copy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.CopyBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCopyBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/backups', + 'body': 'backup', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gsad_backup.CreateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "backupScheduleId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', + 'body': 'backup_schedule', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gsad_backup_schedule.CreateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + 
transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/databases', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.CreateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.DeleteBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': 
'/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup_schedule.DeleteBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDropDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.DropDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseDropDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.GetBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', 
+ 'uri': '/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup_schedule.GetBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.GetDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDatabaseDdl: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.GetDatabaseDdlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, 
str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:getIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:getIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseInternalUpdateGraphOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BaseListBackupOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/backupOperations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.ListBackupOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListBackups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/backups', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup.ListBackupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackups._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListBackupSchedules: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backup_schedule.ListBackupSchedulesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDatabaseOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/databaseOperations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.ListDatabaseOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDatabaseRoles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.ListDatabaseRolesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDatabases: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/databases', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.ListDatabasesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabases._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRestoreDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/databases:restore', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.RestoreDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': 
'/v1/{resource=projects/*/instances/*/backups/*}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:setIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*/databases/*/databaseRoles/*}:testIamPermissions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateBackup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{backup.name=projects/*/instances/*/backups/*}', + 'body': 'backup', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
gsad_backup.UpdateBackupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateBackupSchedule: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}', + 'body': 'backup_schedule', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{database.name=projects/*/instances/*/databases/*}', + 'body': 'database', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.UpdateDatabaseRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDatabaseDdl: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_database_admin.UpdateDatabaseDdlRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + 
@staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseDatabaseAdminRestTransport', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py new file mode 100644 index 0000000000..84cb902d6c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .backup import ( + Backup, + BackupInfo, + BackupInstancePartition, + CopyBackupEncryptionConfig, + CopyBackupMetadata, + CopyBackupRequest, + CreateBackupEncryptionConfig, + CreateBackupMetadata, + CreateBackupRequest, + DeleteBackupRequest, + FullBackupSpec, + GetBackupRequest, + IncrementalBackupSpec, + ListBackupOperationsRequest, + ListBackupOperationsResponse, + ListBackupsRequest, + ListBackupsResponse, + UpdateBackupRequest, +) +from .backup_schedule import ( + BackupSchedule, + BackupScheduleSpec, + CreateBackupScheduleRequest, + CrontabSpec, + DeleteBackupScheduleRequest, + GetBackupScheduleRequest, + ListBackupSchedulesRequest, + ListBackupSchedulesResponse, + UpdateBackupScheduleRequest, +) +from .common import ( + EncryptionConfig, + EncryptionInfo, + OperationProgress, + DatabaseDialect, +) +from .spanner_database_admin import ( + AddSplitPointsRequest, + AddSplitPointsResponse, + CreateDatabaseMetadata, + CreateDatabaseRequest, + Database, + DatabaseRole, + DdlStatementActionInfo, + DropDatabaseRequest, + GetDatabaseDdlRequest, + GetDatabaseDdlResponse, + GetDatabaseRequest, + InternalUpdateGraphOperationRequest, + InternalUpdateGraphOperationResponse, + ListDatabaseOperationsRequest, + ListDatabaseOperationsResponse, + ListDatabaseRolesRequest, + ListDatabaseRolesResponse, + ListDatabasesRequest, + ListDatabasesResponse, + OptimizeRestoredDatabaseMetadata, + RestoreDatabaseEncryptionConfig, + RestoreDatabaseMetadata, + RestoreDatabaseRequest, + RestoreInfo, + SplitPoints, + UpdateDatabaseDdlMetadata, + UpdateDatabaseDdlRequest, + UpdateDatabaseMetadata, + UpdateDatabaseRequest, + RestoreSourceType, +) + +__all__ = ( + 'Backup', + 'BackupInfo', + 'BackupInstancePartition', + 'CopyBackupEncryptionConfig', + 'CopyBackupMetadata', + 'CopyBackupRequest', + 'CreateBackupEncryptionConfig', + 'CreateBackupMetadata', + 'CreateBackupRequest', + 'DeleteBackupRequest', + 'FullBackupSpec', + 'GetBackupRequest', + 'IncrementalBackupSpec', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'UpdateBackupRequest', + 'BackupSchedule', + 'BackupScheduleSpec', + 'CreateBackupScheduleRequest', + 'CrontabSpec', + 'DeleteBackupScheduleRequest', + 'GetBackupScheduleRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'UpdateBackupScheduleRequest', + 'EncryptionConfig', + 'EncryptionInfo', + 'OperationProgress', + 'DatabaseDialect', + 'AddSplitPointsRequest', + 'AddSplitPointsResponse', + 'CreateDatabaseMetadata', + 'CreateDatabaseRequest', + 'Database', + 'DatabaseRole', + 'DdlStatementActionInfo', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'GetDatabaseRequest', + 'InternalUpdateGraphOperationRequest', + 'InternalUpdateGraphOperationResponse', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'ListDatabaseRolesRequest', + 'ListDatabaseRolesResponse', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'OptimizeRestoredDatabaseMetadata', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'RestoreDatabaseRequest', + 'RestoreInfo', + 'SplitPoints', + 'UpdateDatabaseDdlMetadata', + 'UpdateDatabaseDdlRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseRequest', + 'RestoreSourceType', +) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py new file mode 100644 index 
0000000000..d89cfa44b5 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py @@ -0,0 +1,1097 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import common +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'Backup', + 'CreateBackupRequest', + 'CreateBackupMetadata', + 'CopyBackupRequest', + 'CopyBackupMetadata', + 'UpdateBackupRequest', + 'GetBackupRequest', + 'DeleteBackupRequest', + 'ListBackupsRequest', + 'ListBackupsResponse', + 'ListBackupOperationsRequest', + 'ListBackupOperationsResponse', + 'BackupInfo', + 'CreateBackupEncryptionConfig', + 'CopyBackupEncryptionConfig', + 'FullBackupSpec', + 'IncrementalBackupSpec', + 'BackupInstancePartition', + }, +) + + +class Backup(proto.Message): + r"""A backup of a Cloud Spanner database. + + Attributes: + database (str): + Required for the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. Name of the database from which this backup was + created. This needs to be in the same instance as the + backup. Values are of the form + ``projects//instances//databases/``. + version_time (google.protobuf.timestamp_pb2.Timestamp): + The backup will contain an externally consistent copy of the + database at the timestamp specified by ``version_time``. If + ``version_time`` is not specified, the system will set + ``version_time`` to the ``create_time`` of the backup. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Required for the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. The expiration time of the backup, with + microseconds granularity that must be at least 6 hours and + at most 366 days from the time the CreateBackup request is + processed. Once the ``expire_time`` has passed, the backup + is eligible to be automatically deleted by Cloud Spanner to + free the resources used by the backup. + name (str): + Output only for the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. Required for the + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup] + operation. + + A globally unique identifier for the backup which cannot be + changed. Values are of the form + ``projects//instances//backups/[a-z][a-z0-9_\-]*[a-z0-9]`` + The final segment of the name must be between 2 and 60 + characters in length. 
+ + The backup is stored in the location(s) specified in the + instance configuration of the instance containing the + backup, identified by the prefix of the backup name of the + form ``projects//instances/``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request is received. If the request does not specify + ``version_time``, the ``version_time`` of the backup will be + equivalent to the ``create_time``. + size_bytes (int): + Output only. Size of the backup in bytes. + freeable_size_bytes (int): + Output only. The number of bytes that will be + freed by deleting this backup. This value will + be zero if, for example, this backup is part of + an incremental backup chain and younger backups + in the chain require that we keep its data. For + backups not in an incremental backup chain, this + is always the size of the backup. This value may + change if backups on the same chain get created, + deleted or expired. + exclusive_size_bytes (int): + Output only. For a backup in an incremental + backup chain, this is the storage space needed + to keep the data that has changed since the + previous backup. For all other backups, this is + always the size of the backup. This value may + change if backups on the same chain get deleted + or expired. + + This field can be used to calculate the total + storage space used by a set of backups. For + example, the total space used by all backups of + a database can be computed by summing up this + field. + state (google.cloud.spanner_admin_database_v1.types.Backup.State): + Output only. The current state of the backup. + referencing_databases (MutableSequence[str]): + Output only. The names of the restored databases that + reference the backup. The database names are of the form + ``projects//instances//databases/``. + Referencing databases may exist in different instances. The + existence of any referencing database prevents the backup + from being deleted. When a restored database from the backup + enters the ``READY`` state, the reference to the backup is + removed. + encryption_info (google.cloud.spanner_admin_database_v1.types.EncryptionInfo): + Output only. The encryption information for + the backup. + encryption_information (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): + Output only. The encryption information for the backup, + whether it is protected by one or more KMS keys. The + information includes all Cloud KMS key versions used to + encrypt the backup. The ``encryption_status`` field inside of + each ``EncryptionInfo`` + is not populated. At least one of the key versions must be + available for the backup to be restored. If a key version is + revoked in the middle of a restore, the restore behavior is + undefined. + database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): + Output only. The database dialect information + for the backup. + referencing_backups (MutableSequence[str]): + Output only. The names of the destination backups being + created by copying this source backup. The backup names are + of the form + ``projects//instances//backups/``. + Referencing backups may exist in different instances. The + existence of any referencing backup prevents the backup from + being deleted. When the copy operation is done (either + successfully completed or cancelled or the destination + backup is deleted), the reference to the backup is removed. 
+ max_expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The max allowed expiration time of the backup, + with microseconds granularity. A backup's expiration time + can be configured in multiple APIs: CreateBackup, + UpdateBackup, CopyBackup. When updating or copying an + existing backup, the expiration time specified must be less + than ``Backup.max_expire_time``. + backup_schedules (MutableSequence[str]): + Output only. List of backup schedule URIs + that are associated with creating this backup. + This is only applicable for scheduled backups, + and is empty for on-demand backups. + + To optimize for storage, whenever possible, + multiple schedules are collapsed together to + create one backup. In such cases, this field + captures the list of all backup schedule URIs + that are associated with creating this backup. + If collapsing is not done, then this field + captures the single backup schedule URI + associated with creating this backup. + incremental_backup_chain_id (str): + Output only. Populated only for backups in an incremental + backup chain. Backups share the same chain id if and only if + they belong to the same incremental backup chain. Use this + field to determine which backups are part of the same + incremental backup chain. The ordering of backups in the + chain can be determined by ordering the backup + ``version_time``. + oldest_version_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Data deleted at a time older + than this is guaranteed not to be retained in + order to support this backup. For a backup in an + incremental backup chain, this is the version + time of the oldest backup that exists or ever + existed in the chain. For all other backups, + this is the version time of the backup. This + field can be used to understand what data is + being retained by the backup system. + instance_partitions (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupInstancePartition]): + Output only. The instance partition(s) storing the backup. + + This is the same as the list of the instance partition(s) + that the database had footprint in at the backup's + ``version_time``. + """ + class State(proto.Enum): + r"""Indicates the current state of the backup. + + Values: + STATE_UNSPECIFIED (0): + Not specified. + CREATING (1): + The pending backup is still being created. Operations on the + backup may fail with ``FAILED_PRECONDITION`` in this state. + READY (2): + The backup is complete and ready for use. 
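+
+        A minimal usage sketch (illustrative only; the client call is real
+        API surface, but the resource name below is a placeholder)::
+
+            from google.cloud import spanner_admin_database_v1
+
+            client = spanner_admin_database_v1.DatabaseAdminClient()
+            backup = client.get_backup(
+                name="projects/my-project/instances/my-instance/backups/my-backup")
+            if backup.state != spanner_admin_database_v1.Backup.State.READY:
+                print("backup is still being created")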
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + database: str = proto.Field( + proto.STRING, + number=2, + ) + version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=5, + ) + freeable_size_bytes: int = proto.Field( + proto.INT64, + number=15, + ) + exclusive_size_bytes: int = proto.Field( + proto.INT64, + number=16, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + referencing_databases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + encryption_info: common.EncryptionInfo = proto.Field( + proto.MESSAGE, + number=8, + message=common.EncryptionInfo, + ) + encryption_information: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message=common.EncryptionInfo, + ) + database_dialect: common.DatabaseDialect = proto.Field( + proto.ENUM, + number=10, + enum=common.DatabaseDialect, + ) + referencing_backups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + max_expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + backup_schedules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + incremental_backup_chain_id: str = proto.Field( + proto.STRING, + number=17, + ) + oldest_version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=18, + message=timestamp_pb2.Timestamp, + ) + instance_partitions: MutableSequence['BackupInstancePartition'] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message='BackupInstancePartition', + ) + + +class CreateBackupRequest(proto.Message): + r"""The request for + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + + Attributes: + parent (str): + Required. The name of the instance in which the backup will + be created. This must be the same instance that contains the + database the backup will be created from. The backup will be + stored in the location(s) specified in the instance + configuration of this instance. Values are of the form + ``projects//instances/``. + backup_id (str): + Required. The id of the backup to be created. The + ``backup_id`` appended to ``parent`` forms the full backup + name of the form + ``projects//instances//backups/``. + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to create. + encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): + Optional. The encryption configuration used to encrypt the + backup. If this field is not specified, the backup will use + the same encryption configuration as the database by + default, namely + [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] + = ``USE_DATABASE_ENCRYPTION``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup: 'Backup' = proto.Field( + proto.MESSAGE, + number=3, + message='Backup', + ) + encryption_config: 'CreateBackupEncryptionConfig' = proto.Field( + proto.MESSAGE, + number=4, + message='CreateBackupEncryptionConfig', + ) + + +class CreateBackupMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. + + Attributes: + name (str): + The name of the backup being created. + database (str): + The name of the database the backup is + created from. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + operation. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which cancellation of this operation was + received. + [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] + starts asynchronous cancellation on a long-running + operation. The server makes a best effort to cancel the + operation, but success is not guaranteed. Clients can use + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + or other methods to check whether the cancellation succeeded + or whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; + instead, it becomes an operation with an + [Operation.error][google.longrunning.Operation.error] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=3, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class CopyBackupRequest(proto.Message): + r"""The request for + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. + + Attributes: + parent (str): + Required. The name of the destination instance that will + contain the backup copy. Values are of the form: + ``projects//instances/``. + backup_id (str): + Required. The id of the backup copy. The ``backup_id`` + appended to ``parent`` forms the full backup_uri of the form + ``projects//instances//backups/``. + source_backup (str): + Required. The source backup to be copied. The source backup + needs to be in READY state for it to be copied. Once + CopyBackup is in progress, the source backup cannot be + deleted or cleaned up on expiration until CopyBackup is + finished. Values are of the form: + ``projects//instances//backups/``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Required. The expiration time of the backup in microsecond + granularity. The expiration time must be at least 6 hours + and at most 366 days from the ``create_time`` of the source + backup. Once the ``expire_time`` has passed, the backup is + eligible to be automatically deleted by Cloud Spanner to + free the resources used by the backup. + encryption_config (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig): + Optional. The encryption configuration used to encrypt the + backup. 
If this field is not specified, the backup will use + the same encryption configuration as the source backup by + default, namely + [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] + = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_id: str = proto.Field( + proto.STRING, + number=2, + ) + source_backup: str = proto.Field( + proto.STRING, + number=3, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + encryption_config: 'CopyBackupEncryptionConfig' = proto.Field( + proto.MESSAGE, + number=5, + message='CopyBackupEncryptionConfig', + ) + + +class CopyBackupMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. + + Attributes: + name (str): + The name of the backup being created through the copy + operation. Values are of the form + ``projects//instances//backups/``. + source_backup (str): + The name of the source backup that is being copied. Values + are of the form + ``projects//instances//backups/``. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] + operation. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which cancellation of CopyBackup operation was + received. + [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] + starts asynchronous cancellation on a long-running + operation. The server makes a best effort to cancel the + operation, but success is not guaranteed. Clients can use + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + or other methods to check whether the cancellation succeeded + or whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; + instead, it becomes an operation with an + [Operation.error][google.longrunning.Operation.error] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source_backup: str = proto.Field( + proto.STRING, + number=2, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=3, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateBackupRequest(proto.Message): + r"""The request for + [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. + + Attributes: + backup (google.cloud.spanner_admin_database_v1.types.Backup): + Required. The backup to update. ``backup.name``, and the + fields to be updated as specified by ``update_mask`` are + required. Other fields are ignored. Update is only supported + for the following fields: + + - ``backup.expire_time``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields (e.g. + ``expire_time``) in the Backup resource should be updated. + This mask is relative to the Backup resource, not to the + request message. The field mask must always be specified; + this prevents any future fields from being erased + accidentally by clients that do not know about them. 
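+
+        A minimal construction sketch (illustrative only; the backup name
+        and new expiration are placeholders)::
+
+            import datetime
+            from google.cloud import spanner_admin_database_v1
+            from google.protobuf import field_mask_pb2
+
+            request = spanner_admin_database_v1.UpdateBackupRequest(
+                backup=spanner_admin_database_v1.Backup(
+                    name="projects/my-project/instances/my-instance/backups/my-backup",
+                    expire_time=datetime.datetime.now(datetime.timezone.utc)
+                    + datetime.timedelta(days=30),
+                ),
+                update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
+            )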
+ """ + + backup: 'Backup' = proto.Field( + proto.MESSAGE, + number=1, + message='Backup', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetBackupRequest(proto.Message): + r"""The request for + [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. + + Attributes: + name (str): + Required. Name of the backup. Values are of the form + ``projects//instances//backups/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupRequest(proto.Message): + r"""The request for + [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. + + Attributes: + name (str): + Required. Name of the backup to delete. Values are of the + form + ``projects//instances//backups/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupsRequest(proto.Message): + r"""The request for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Attributes: + parent (str): + Required. The instance to list backups from. Values are of + the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned backups. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [Backup][google.spanner.admin.database.v1.Backup] are + eligible for filtering: + + - ``name`` + - ``database`` + - ``state`` + - ``create_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``expire_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``version_time`` (and values are of the format + YYYY-MM-DDTHH:MM:SSZ) + - ``size_bytes`` + - ``backup_schedules`` + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic, but you can specify AND, OR, and + NOT logic explicitly. + + Here are a few examples: + + - ``name:Howl`` - The backup's name contains the string + "howl". + - ``database:prod`` - The database's name contains the + string "prod". + - ``state:CREATING`` - The backup is pending creation. + - ``state:READY`` - The backup is fully created and ready + for use. + - ``(name:howl) AND (create_time < \"2018-03-28T14:50:00Z\")`` + - The backup name contains the string "howl" and + ``create_time`` of the backup is before + 2018-03-28T14:50:00Z. + - ``expire_time < \"2018-03-28T14:50:00Z\"`` - The backup + ``expire_time`` is before 2018-03-28T14:50:00Z. + - ``size_bytes > 10000000000`` - The backup's size is + greater than 10GB + - ``backup_schedules:daily`` - The backup is created from a + schedule with "daily" in its name. + page_size (int): + Number of backups to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupsResponse.next_page_token] + from a previous + [ListBackupsResponse][google.spanner.admin.database.v1.ListBackupsResponse] + to the same ``parent`` and with the same ``filter``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupsResponse(proto.Message): + r"""The response for + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. + + Attributes: + backups (MutableSequence[google.cloud.spanner_admin_database_v1.types.Backup]): + The list of matching backups. Backups returned are ordered + by ``create_time`` in descending order, starting from the + most recent ``create_time``. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups] + call to fetch more of the matching backups. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence['Backup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Backup', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListBackupOperationsRequest(proto.Message): + r"""The request for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Attributes: + parent (str): + Required. The instance of the backup operations. Values are + of the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned backup + operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [operation][google.longrunning.Operation] are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. For example, + the type string for + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] + is + ``type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first if filtering on + metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic, but you can specify AND, OR, and + NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` + ``metadata.database:prod`` - Returns operations where: + + - The operation's metadata type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + - The source database name of backup contains the string + "prod". 
+ + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` + ``(metadata.name:howl) AND`` + ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Returns operations where: + + - The operation's metadata type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. + - The backup name contains the string "howl". + - The operation started before 2018-03-28T14:50:00Z. + - The operation resulted in an error. + + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` + ``(metadata.source_backup:test) AND`` + ``(metadata.progress.start_time < \"2022-01-18T14:50:00Z\") AND`` + ``(error:*)`` - Returns operations where: + + - The operation's metadata type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. + - The source backup name contains the string "test". + - The operation started before 2022-01-18T14:50:00Z. + - The operation resulted in an error. + + - ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` + ``(metadata.database:test_db)) OR`` + ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` + ``(metadata.source_backup:test_bkp)) AND`` + ``(error:*)`` - Returns operations where: + + - The operation's metadata matches either of criteria: + + - The operation's metadata type is + [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] + AND the source database name of the backup contains + the string "test_db" + - The operation's metadata type is + [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata] + AND the source backup name contains the string + "test_bkp" + + - The operation resulted in an error. + page_size (int): + Number of operations to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupOperationsResponse.next_page_token] + from a previous + [ListBackupOperationsResponse][google.spanner.admin.database.v1.ListBackupOperationsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupOperationsResponse(proto.Message): + r"""The response for + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. + + Attributes: + operations (MutableSequence[google.longrunning.operations_pb2.Operation]): + The list of matching backup [long-running + operations][google.longrunning.Operation]. Each operation's + name will be prefixed by the backup's name. The operation's + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that are pending or have + completed/failed/canceled within the last 7 days. Operations + returned are ordered by + ``operation.metadata.value.progress.start_time`` in + descending order starting from the most recently started + operation. 
+ next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations] + call to fetch more of the matching metadata. + """ + + @property + def raw_page(self): + return self + + operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class BackupInfo(proto.Message): + r"""Information about a backup. + + Attributes: + backup (str): + Name of the backup. + version_time (google.protobuf.timestamp_pb2.Timestamp): + The backup contains an externally consistent copy of + ``source_database`` at the timestamp specified by + ``version_time``. If the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request did not specify ``version_time``, the + ``version_time`` of the backup is equivalent to the + ``create_time``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the + [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] + request was received. + source_database (str): + Name of the database the backup was created + from. + """ + + backup: str = proto.Field( + proto.STRING, + number=1, + ) + version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + source_database: str = proto.Field( + proto.STRING, + number=3, + ) + + +class CreateBackupEncryptionConfig(proto.Message): + r"""Encryption configuration for the backup to create. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig.EncryptionType): + Required. The encryption type of the backup. + kms_key_name (str): + Optional. The Cloud KMS key that will be used to protect the + backup. This field should be set only when + [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] + is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Optional. Specifies the KMS configuration for the one or + more keys used to protect the backup. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + + The keys referenced by kms_key_names must fully cover all + regions of the backup's instance configuration. Some + examples: + + - For single region instance configs, specify a single + regional location KMS key. + - For multi-regional instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For an instance config of type USER_MANAGED, please + specify only regional location KMS keys to cover each + region in the instance config. Multi-regional location KMS + keys are not supported for USER_MANAGED instance configs. + """ + class EncryptionType(proto.Enum): + r"""Encryption types for the backup. + + Values: + ENCRYPTION_TYPE_UNSPECIFIED (0): + Unspecified. Do not use. + USE_DATABASE_ENCRYPTION (1): + Use the same encryption configuration as the database. This + is the default option when + [encryption_config][google.spanner.admin.database.v1.CreateBackupEncryptionConfig] + is empty. 
For example, if the database is using + ``Customer_Managed_Encryption``, the backup will be using + the same Cloud KMS key as the database. + GOOGLE_DEFAULT_ENCRYPTION (2): + Use Google default encryption. + CUSTOMER_MANAGED_ENCRYPTION (3): + Use customer managed encryption. If specified, + ``kms_key_name`` must contain a valid Cloud KMS key. + """ + ENCRYPTION_TYPE_UNSPECIFIED = 0 + USE_DATABASE_ENCRYPTION = 1 + GOOGLE_DEFAULT_ENCRYPTION = 2 + CUSTOMER_MANAGED_ENCRYPTION = 3 + + encryption_type: EncryptionType = proto.Field( + proto.ENUM, + number=1, + enum=EncryptionType, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CopyBackupEncryptionConfig(proto.Message): + r"""Encryption configuration for the copied backup. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig.EncryptionType): + Required. The encryption type of the backup. + kms_key_name (str): + Optional. The Cloud KMS key that will be used to protect the + backup. This field should be set only when + [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] + is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Optional. Specifies the KMS configuration for the one or + more keys used to protect the backup. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + Kms keys specified can be in any order. + + The keys referenced by kms_key_names must fully cover all + regions of the backup's instance configuration. Some + examples: + + - For single region instance configs, specify a single + regional location KMS key. + - For multi-regional instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For an instance config of type USER_MANAGED, please + specify only regional location KMS keys to cover each + region in the instance config. Multi-regional location KMS + keys are not supported for USER_MANAGED instance configs. + """ + class EncryptionType(proto.Enum): + r"""Encryption types for the backup. + + Values: + ENCRYPTION_TYPE_UNSPECIFIED (0): + Unspecified. Do not use. + USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1): + This is the default option for + [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] + when + [encryption_config][google.spanner.admin.database.v1.CopyBackupEncryptionConfig] + is not specified. For example, if the source backup is using + ``Customer_Managed_Encryption``, the backup will be using + the same Cloud KMS key as the source backup. + GOOGLE_DEFAULT_ENCRYPTION (2): + Use Google default encryption. + CUSTOMER_MANAGED_ENCRYPTION (3): + Use customer managed encryption. If specified, either + ``kms_key_name`` or ``kms_key_names`` must contain valid + Cloud KMS key(s). 
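+
+        A minimal construction sketch (illustrative only; the KMS key name
+        is a placeholder)::
+
+            from google.cloud import spanner_admin_database_v1
+
+            config = spanner_admin_database_v1.CopyBackupEncryptionConfig(
+                encryption_type=spanner_admin_database_v1.CopyBackupEncryptionConfig.EncryptionType.CUSTOMER_MANAGED_ENCRYPTION,
+                kms_key_name="projects/p/locations/us-central1/keyRings/kr/cryptoKeys/ck",
+            )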
+ """ + ENCRYPTION_TYPE_UNSPECIFIED = 0 + USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1 + GOOGLE_DEFAULT_ENCRYPTION = 2 + CUSTOMER_MANAGED_ENCRYPTION = 3 + + encryption_type: EncryptionType = proto.Field( + proto.ENUM, + number=1, + enum=EncryptionType, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class FullBackupSpec(proto.Message): + r"""The specification for full backups. + A full backup stores the entire contents of the database at a + given version time. + + """ + + +class IncrementalBackupSpec(proto.Message): + r"""The specification for incremental backup chains. + An incremental backup stores the delta of changes between a + previous backup and the database contents at a given version + time. An incremental backup chain consists of a full backup and + zero or more successive incremental backups. The first backup + created for an incremental backup chain is always a full backup. + + """ + + +class BackupInstancePartition(proto.Message): + r"""Instance partition information for the backup. + + Attributes: + instance_partition (str): + A unique identifier for the instance partition. Values are + of the form + ``projects//instances//instancePartitions/`` + """ + + instance_partition: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py new file mode 100644 index 0000000000..b69e3b08ee --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'BackupScheduleSpec', + 'BackupSchedule', + 'CrontabSpec', + 'CreateBackupScheduleRequest', + 'GetBackupScheduleRequest', + 'DeleteBackupScheduleRequest', + 'ListBackupSchedulesRequest', + 'ListBackupSchedulesResponse', + 'UpdateBackupScheduleRequest', + }, +) + + +class BackupScheduleSpec(proto.Message): + r"""Defines specifications of the backup schedule. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cron_spec (google.cloud.spanner_admin_database_v1.types.CrontabSpec): + Cron style schedule specification. 
+ + This field is a member of `oneof`_ ``schedule_spec``. + """ + + cron_spec: 'CrontabSpec' = proto.Field( + proto.MESSAGE, + number=1, + oneof='schedule_spec', + message='CrontabSpec', + ) + + +class BackupSchedule(proto.Message): + r"""BackupSchedule expresses the automated backup creation + specification for a Spanner database. + Next ID: 10 + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Identifier. Output only for the + [CreateBackupSchedule][DatabaseAdmin.CreateBackupSchedule] + operation. Required for the + [UpdateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule] + operation. A globally unique identifier for the backup + schedule which cannot be changed. Values are of the form + ``projects//instances//databases//backupSchedules/[a-z][a-z0-9_\-]*[a-z0-9]`` + The final segment of the name must be between 2 and 60 + characters in length. + spec (google.cloud.spanner_admin_database_v1.types.BackupScheduleSpec): + Optional. The schedule specification based on + which the backup creations are triggered. + retention_duration (google.protobuf.duration_pb2.Duration): + Optional. The retention duration of a backup + that must be at least 6 hours and at most 366 + days. The backup is eligible to be automatically + deleted once the retention period has elapsed. + encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): + Optional. The encryption configuration that + will be used to encrypt the backup. If this + field is not specified, the backup will use the + same encryption configuration as the database. + full_backup_spec (google.cloud.spanner_admin_database_v1.types.FullBackupSpec): + The schedule creates only full backups. + + This field is a member of `oneof`_ ``backup_type_spec``. + incremental_backup_spec (google.cloud.spanner_admin_database_v1.types.IncrementalBackupSpec): + The schedule creates incremental backup + chains. + + This field is a member of `oneof`_ ``backup_type_spec``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp at which the + schedule was last updated. If the schedule has + never been updated, this field contains the + timestamp when the schedule was first created. 
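+
+        A minimal construction sketch (illustrative only; the cron text uses
+        one of the documented frequencies and the retention value is
+        arbitrary)::
+
+            from google.cloud import spanner_admin_database_v1
+            from google.protobuf import duration_pb2
+
+            schedule = spanner_admin_database_v1.BackupSchedule(
+                spec=spanner_admin_database_v1.BackupScheduleSpec(
+                    cron_spec=spanner_admin_database_v1.CrontabSpec(text="0 2 * * *"),
+                ),
+                retention_duration=duration_pb2.Duration(seconds=7 * 24 * 3600),
+                full_backup_spec=spanner_admin_database_v1.FullBackupSpec(),
+            )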
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + spec: 'BackupScheduleSpec' = proto.Field( + proto.MESSAGE, + number=6, + message='BackupScheduleSpec', + ) + retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + encryption_config: backup.CreateBackupEncryptionConfig = proto.Field( + proto.MESSAGE, + number=4, + message=backup.CreateBackupEncryptionConfig, + ) + full_backup_spec: backup.FullBackupSpec = proto.Field( + proto.MESSAGE, + number=7, + oneof='backup_type_spec', + message=backup.FullBackupSpec, + ) + incremental_backup_spec: backup.IncrementalBackupSpec = proto.Field( + proto.MESSAGE, + number=8, + oneof='backup_type_spec', + message=backup.IncrementalBackupSpec, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +class CrontabSpec(proto.Message): + r"""CrontabSpec can be used to specify the version time and + frequency at which the backup should be created. + + Attributes: + text (str): + Required. Textual representation of the crontab. User can + customize the backup frequency and the backup version time + using the cron expression. The version time must be in UTC + timezone. + + The backup will contain an externally consistent copy of the + database at the version time. Allowed frequencies are 12 + hour, 1 day, 1 week and 1 month. Examples of valid cron + specifications: + + - ``0 2/12 * * *`` : every 12 hours at (2, 14) hours past + midnight in UTC. + - ``0 2,14 * * *`` : every 12 hours at (2,14) hours past + midnight in UTC. + - ``0 2 * * *`` : once a day at 2 past midnight in UTC. + - ``0 2 * * 0`` : once a week every Sunday at 2 past + midnight in UTC. + - ``0 2 8 * *`` : once a month on 8th day at 2 past midnight + in UTC. + time_zone (str): + Output only. The time zone of the times in + ``CrontabSpec.text``. Currently only UTC is supported. + creation_window (google.protobuf.duration_pb2.Duration): + Output only. Schedule backups will contain an externally + consistent copy of the database at the version time + specified in ``schedule_spec.cron_spec``. However, Spanner + may not initiate the creation of the scheduled backups at + that version time. Spanner will initiate the creation of + scheduled backups within the time window bounded by the + version_time specified in ``schedule_spec.cron_spec`` and + version_time + ``creation_window``. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + time_zone: str = proto.Field( + proto.STRING, + number=2, + ) + creation_window: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class CreateBackupScheduleRequest(proto.Message): + r"""The request for + [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. + + Attributes: + parent (str): + Required. The name of the database that this + backup schedule applies to. + backup_schedule_id (str): + Required. The Id to use for the backup schedule. The + ``backup_schedule_id`` appended to ``parent`` forms the full + backup schedule name of the form + ``projects//instances//databases//backupSchedules/``. + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + backup_schedule_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup_schedule: 'BackupSchedule' = proto.Field( + proto.MESSAGE, + number=3, + message='BackupSchedule', + ) + + +class GetBackupScheduleRequest(proto.Message): + r"""The request for + [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. + + Attributes: + name (str): + Required. The name of the schedule to retrieve. Values are + of the form + ``projects//instances//databases//backupSchedules/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBackupScheduleRequest(proto.Message): + r"""The request for + [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. + + Attributes: + name (str): + Required. The name of the schedule to delete. Values are of + the form + ``projects//instances//databases//backupSchedules/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListBackupSchedulesRequest(proto.Message): + r"""The request for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Attributes: + parent (str): + Required. Database is the parent resource + whose backup schedules should be listed. Values + are of the form + projects//instances//databases/ + page_size (int): + Optional. Number of backup schedules to be + returned in the response. If 0 or less, defaults + to the server's maximum allowed page size. + page_token (str): + Optional. If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListBackupSchedulesResponse.next_page_token] + from a previous + [ListBackupSchedulesResponse][google.spanner.admin.database.v1.ListBackupSchedulesResponse] + to the same ``parent``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListBackupSchedulesResponse(proto.Message): + r"""The response for + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. + + Attributes: + backup_schedules (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupSchedule]): + The list of backup schedules for a database. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules] + call to fetch more of the schedules. + """ + + @property + def raw_page(self): + return self + + backup_schedules: MutableSequence['BackupSchedule'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BackupSchedule', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateBackupScheduleRequest(proto.Message): + r"""The request for + [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. + + Attributes: + backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): + Required. The backup schedule to update. + ``backup_schedule.name``, and the fields to be updated as + specified by ``update_mask`` are required. Other fields are + ignored. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + the BackupSchedule resource should be updated. 
+ This mask is relative to the BackupSchedule + resource, not to the request message. The field + mask must always be specified; this prevents any + future fields from being erased accidentally. + """ + + backup_schedule: 'BackupSchedule' = proto.Field( + proto.MESSAGE, + number=1, + message='BackupSchedule', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py new file mode 100644 index 0000000000..40606ba0e5 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'DatabaseDialect', + 'OperationProgress', + 'EncryptionConfig', + 'EncryptionInfo', + }, +) + + +class DatabaseDialect(proto.Enum): + r"""Indicates the dialect type of a database. + + Values: + DATABASE_DIALECT_UNSPECIFIED (0): + Default value. This value will create a database with the + GOOGLE_STANDARD_SQL dialect. + GOOGLE_STANDARD_SQL (1): + GoogleSQL supported SQL. + POSTGRESQL (2): + PostgreSQL supported SQL. + """ + DATABASE_DIALECT_UNSPECIFIED = 0 + GOOGLE_STANDARD_SQL = 1 + POSTGRESQL = 2 + + +class OperationProgress(proto.Message): + r"""Encapsulates progress related information for a Cloud Spanner + long running operation. + + Attributes: + progress_percent (int): + Percent completion of the operation. + Values are between 0 and 100 inclusive. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time the request was received. + end_time (google.protobuf.timestamp_pb2.Timestamp): + If set, the time at which this operation + failed or was completed successfully. + """ + + progress_percent: int = proto.Field( + proto.INT32, + number=1, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class EncryptionConfig(proto.Message): + r"""Encryption configuration for a Cloud Spanner database. + + Attributes: + kms_key_name (str): + The Cloud KMS key to be used for encrypting and decrypting + the database. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Specifies the KMS configuration for the one or more keys + used to encrypt the database. 
Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + + The keys referenced by kms_key_names must fully cover all + regions of the database instance configuration. Some + examples: + + - For single region database instance configs, specify a + single regional location KMS key. + - For multi-regional database instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For a database instance config of type USER_MANAGED, + please specify only regional location KMS keys to cover + each region in the instance config. Multi-regional + location KMS keys are not supported for USER_MANAGED + instance configs. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class EncryptionInfo(proto.Message): + r"""Encryption information for a Cloud Spanner database or + backup. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.EncryptionInfo.Type): + Output only. The type of encryption. + encryption_status (google.rpc.status_pb2.Status): + Output only. If present, the status of a + recent encrypt/decrypt call on underlying data + for this database or backup. Regardless of + status, data is always encrypted at rest. + kms_key_version (str): + Output only. A Cloud KMS key version that is + being used to protect the database or backup. + """ + class Type(proto.Enum): + r"""Possible encryption types. + + Values: + TYPE_UNSPECIFIED (0): + Encryption type was not specified, though + data at rest remains encrypted. + GOOGLE_DEFAULT_ENCRYPTION (1): + The data is encrypted at rest with a key that + is fully managed by Google. No key version or + status will be populated. This is the default + state. + CUSTOMER_MANAGED_ENCRYPTION (2): + The data is encrypted at rest with a key that is managed by + the customer. The active version of the key, + ``kms_key_version``, will be populated, and + ``encryption_status`` may be populated. + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_DEFAULT_ENCRYPTION = 1 + CUSTOMER_MANAGED_ENCRYPTION = 2 + + encryption_type: Type = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + encryption_status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=4, + message=status_pb2.Status, + ) + kms_key_version: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py new file mode 100644 index 0000000000..3d882c8443 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py @@ -0,0 +1,1348 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup +from google.cloud.spanner_admin_database_v1.types import common +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.database.v1', + manifest={ + 'RestoreSourceType', + 'RestoreInfo', + 'Database', + 'ListDatabasesRequest', + 'ListDatabasesResponse', + 'CreateDatabaseRequest', + 'CreateDatabaseMetadata', + 'GetDatabaseRequest', + 'UpdateDatabaseRequest', + 'UpdateDatabaseMetadata', + 'UpdateDatabaseDdlRequest', + 'DdlStatementActionInfo', + 'UpdateDatabaseDdlMetadata', + 'DropDatabaseRequest', + 'GetDatabaseDdlRequest', + 'GetDatabaseDdlResponse', + 'ListDatabaseOperationsRequest', + 'ListDatabaseOperationsResponse', + 'RestoreDatabaseRequest', + 'RestoreDatabaseEncryptionConfig', + 'RestoreDatabaseMetadata', + 'OptimizeRestoredDatabaseMetadata', + 'DatabaseRole', + 'ListDatabaseRolesRequest', + 'ListDatabaseRolesResponse', + 'AddSplitPointsRequest', + 'AddSplitPointsResponse', + 'SplitPoints', + 'InternalUpdateGraphOperationRequest', + 'InternalUpdateGraphOperationResponse', + }, +) + + +class RestoreSourceType(proto.Enum): + r"""Indicates the type of the restore source. + + Values: + TYPE_UNSPECIFIED (0): + No restore associated. + BACKUP (1): + A backup was used as the source of the + restore. + """ + TYPE_UNSPECIFIED = 0 + BACKUP = 1 + + +class RestoreInfo(proto.Message): + r"""Information about the database restore. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): + The type of the restore source. + backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): + Information about the backup used to restore + the database. The backup may no longer exist. + + This field is a member of `oneof`_ ``source_info``. + """ + + source_type: 'RestoreSourceType' = proto.Field( + proto.ENUM, + number=1, + enum='RestoreSourceType', + ) + backup_info: gsad_backup.BackupInfo = proto.Field( + proto.MESSAGE, + number=2, + oneof='source_info', + message=gsad_backup.BackupInfo, + ) + + +class Database(proto.Message): + r"""A Cloud Spanner database. + + Attributes: + name (str): + Required. The name of the database. Values are of the form + ``projects//instances//databases/``, + where ```` is as specified in the + ``CREATE DATABASE`` statement. This name can be passed to + other API methods to identify the database. + state (google.cloud.spanner_admin_database_v1.types.Database.State): + Output only. The current database state. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If exists, the time at which the + database creation started. + restore_info (google.cloud.spanner_admin_database_v1.types.RestoreInfo): + Output only. Applicable only for restored + databases. Contains information about the + restore source. + encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): + Output only. 
For databases that are using
+            customer managed encryption, this field contains
+            the encryption configuration for the database.
+            For databases that are using Google default or
+            other types of encryption, this field is empty.
+        encryption_info (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]):
+            Output only. For databases that are using customer managed
+            encryption, this field contains the encryption information
+            for the database, such as all Cloud KMS key versions that
+            are in use. The ``encryption_status`` field inside of each
+            ``EncryptionInfo`` is not populated.
+
+            For databases that are using Google default or other types
+            of encryption, this field is empty.
+
+            This field is propagated lazily from the backend. There
+            might be a delay from when a key version is being used and
+            when it appears in this field.
+        version_retention_period (str):
+            Output only. The period in which Cloud Spanner retains all
+            versions of data for the database. This is the same as the
+            value of version_retention_period database option set using
+            [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl].
+            Defaults to 1 hour, if not set.
+        earliest_version_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. Earliest timestamp at which
+            older versions of the data can be read. This
+            value is continuously updated by Cloud Spanner
+            and becomes stale the moment it is queried. If
+            you are using this value to recover data, make
+            sure to account for the time from the moment
+            when the value is queried to the moment when you
+            initiate the recovery.
+        default_leader (str):
+            Output only. The read-write region which contains the
+            database's leader replicas.
+
+            This is the same as the value of default_leader database
+            option set using DatabaseAdmin.CreateDatabase or
+            DatabaseAdmin.UpdateDatabaseDdl. If not explicitly set, this
+            is empty.
+        database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect):
+            Output only. The dialect of the Cloud Spanner
+            Database.
+        enable_drop_protection (bool):
+            Whether drop protection is enabled for this database.
+            Defaults to false, if not set. For more details, please see
+            how to `prevent accidental database
+            deletion `__.
+        reconciling (bool):
+            Output only. If true, the database is being
+            updated. If false, there are no ongoing update
+            operations for the database.
+    """
+    class State(proto.Enum):
+        r"""Indicates the current state of the database.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Not specified.
+            CREATING (1):
+                The database is still being created. Operations on the
+                database may fail with ``FAILED_PRECONDITION`` in this
+                state.
+            READY (2):
+                The database is fully created and ready for
+                use.
+            READY_OPTIMIZING (3):
+                The database is fully created and ready for use, but is
+                still being optimized for performance and cannot handle full
+                load.
+
+                In this state, the database still references the backup it
+                was restored from, preventing the backup from being deleted.
+                When optimizations are complete, the full performance of the
+                database will be restored, and the database will transition
+                to ``READY`` state.
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + READY_OPTIMIZING = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: State = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + restore_info: 'RestoreInfo' = proto.Field( + proto.MESSAGE, + number=4, + message='RestoreInfo', + ) + encryption_config: common.EncryptionConfig = proto.Field( + proto.MESSAGE, + number=5, + message=common.EncryptionConfig, + ) + encryption_info: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=common.EncryptionInfo, + ) + version_retention_period: str = proto.Field( + proto.STRING, + number=6, + ) + earliest_version_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + default_leader: str = proto.Field( + proto.STRING, + number=9, + ) + database_dialect: common.DatabaseDialect = proto.Field( + proto.ENUM, + number=10, + enum=common.DatabaseDialect, + ) + enable_drop_protection: bool = proto.Field( + proto.BOOL, + number=11, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=12, + ) + + +class ListDatabasesRequest(proto.Message): + r"""The request for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Attributes: + parent (str): + Required. The instance whose databases should be listed. + Values are of the form + ``projects//instances/``. + page_size (int): + Number of databases to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListDatabasesResponse.next_page_token] + from a previous + [ListDatabasesResponse][google.spanner.admin.database.v1.ListDatabasesResponse]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDatabasesResponse(proto.Message): + r"""The response for + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. + + Attributes: + databases (MutableSequence[google.cloud.spanner_admin_database_v1.types.Database]): + Databases that matched the request. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases] + call to fetch more of the matching databases. + """ + + @property + def raw_page(self): + return self + + databases: MutableSequence['Database'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Database', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDatabaseRequest(proto.Message): + r"""The request for + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + + Attributes: + parent (str): + Required. The name of the instance that will serve the new + database. Values are of the form + ``projects//instances/``. + create_statement (str): + Required. A ``CREATE DATABASE`` statement, which specifies + the ID of the new database. The database ID must conform to + the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be + between 2 and 30 characters in length. 
If the database ID is + a reserved word or if it contains a hyphen, the database ID + must be enclosed in backticks (:literal:`\``). + extra_statements (MutableSequence[str]): + Optional. A list of DDL statements to run + inside the newly created database. Statements + can create tables, indexes, etc. These + statements execute atomically with the creation + of the database: + + if there is an error in any statement, the + database is not created. + encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): + Optional. The encryption configuration for + the database. If this field is not specified, + Cloud Spanner will encrypt/decrypt all data at + rest using Google default encryption. + database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): + Optional. The dialect of the Cloud Spanner + Database. + proto_descriptors (bytes): + Optional. Proto descriptors used by CREATE/ALTER PROTO + BUNDLE statements in 'extra_statements' above. Contains a + protobuf-serialized + `google.protobuf.FileDescriptorSet `__. + To generate it, + `install `__ and + run ``protoc`` with --include_imports and + --descriptor_set_out. For example, to generate for + moon/shot/app.proto, run + + :: + + $protoc --proto_path=/app_path --proto_path=/lib_path \ + --include_imports \ + --descriptor_set_out=descriptors.data \ + moon/shot/app.proto + + For more details, see protobuffer `self + description `__. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + create_statement: str = proto.Field( + proto.STRING, + number=2, + ) + extra_statements: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + encryption_config: common.EncryptionConfig = proto.Field( + proto.MESSAGE, + number=4, + message=common.EncryptionConfig, + ) + database_dialect: common.DatabaseDialect = proto.Field( + proto.ENUM, + number=5, + enum=common.DatabaseDialect, + ) + proto_descriptors: bytes = proto.Field( + proto.BYTES, + number=6, + ) + + +class CreateDatabaseMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. + + Attributes: + database (str): + The database being created. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetDatabaseRequest(proto.Message): + r"""The request for + [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. + + Attributes: + name (str): + Required. The name of the requested database. Values are of + the form + ``projects//instances//databases/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDatabaseRequest(proto.Message): + r"""The request for + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + + Attributes: + database (google.cloud.spanner_admin_database_v1.types.Database): + Required. The database to update. The ``name`` field of the + database is of the form + ``projects//instances//databases/``. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, only + ``enable_drop_protection`` field can be updated. 
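+
+    Example:
+        A minimal sketch of enabling drop protection with this request;
+        the ``client`` instance and the resource name below are
+        illustrative placeholders, not part of this message::
+
+            from google.protobuf import field_mask_pb2
+
+            request = UpdateDatabaseRequest(
+                database=Database(
+                    name="projects/my-project/instances/my-instance/databases/my-db",
+                    enable_drop_protection=True,
+                ),
+                update_mask=field_mask_pb2.FieldMask(
+                    paths=["enable_drop_protection"],
+                ),
+            )
+            # update_database returns a long-running operation;
+            # result() blocks until the update completes.
+            operation = client.update_database(request=request)
+            operation.result()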
+ """ + + database: 'Database' = proto.Field( + proto.MESSAGE, + number=1, + message='Database', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class UpdateDatabaseMetadata(proto.Message): + r"""Metadata type for the operation returned by + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + + Attributes: + request (google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest): + The request for + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the + [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase] + operation. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is + best-effort). + """ + + request: 'UpdateDatabaseRequest' = proto.Field( + proto.MESSAGE, + number=1, + message='UpdateDatabaseRequest', + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=2, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class UpdateDatabaseDdlRequest(proto.Message): + r"""Enqueues the given DDL statements to be applied, in order but not + necessarily all at once, to the database schema at some point (or + points) in the future. The server checks that the statements are + executable (syntactically valid, name tables that exist, etc.) + before enqueueing them, but they may still fail upon later execution + (e.g., if a statement from another batch of statements is applied + first and it conflicts in some way, or if there is some data-related + problem like a ``NULL`` value in a column to which ``NOT NULL`` + would be added). If a statement fails, all subsequent statements in + the batch are automatically cancelled. + + Each batch of statements is assigned a name which can be used with + the [Operations][google.longrunning.Operations] API to monitor + progress. See the + [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] + field for more details. + + Attributes: + database (str): + Required. The database to update. + statements (MutableSequence[str]): + Required. DDL statements to be applied to the + database. + operation_id (str): + If empty, the new update request is assigned an + automatically-generated operation ID. Otherwise, + ``operation_id`` is used to construct the name of the + resulting [Operation][google.longrunning.Operation]. + + Specifying an explicit operation ID simplifies determining + whether the statements were executed in the event that the + [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] + call is replayed, or the return value is otherwise lost: the + [database][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.database] + and ``operation_id`` fields can be combined to form the + [name][google.longrunning.Operation.name] of the resulting + [longrunning.Operation][google.longrunning.Operation]: + ``/operations/``. + + ``operation_id`` should be unique within the database, and + must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that + automatically-generated operation IDs always begin with an + underscore. 
If the named operation already exists,
+            [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]
+            returns ``ALREADY_EXISTS``.
+        proto_descriptors (bytes):
+            Optional. Proto descriptors used by CREATE/ALTER PROTO
+            BUNDLE statements. Contains a protobuf-serialized
+            `google.protobuf.FileDescriptorSet `__.
+            To generate it,
+            `install `__ and
+            run ``protoc`` with --include_imports and
+            --descriptor_set_out. For example, to generate for
+            moon/shot/app.proto, run
+
+            ::
+
+               $protoc --proto_path=/app_path --proto_path=/lib_path \
+                        --include_imports \
+                        --descriptor_set_out=descriptors.data \
+                        moon/shot/app.proto
+
+            For more details, see protobuffer `self
+            description `__.
+        throughput_mode (bool):
+            Optional. This field is exposed to be used by the Spanner
+            Migration Tool. For more details, see
+            `SMT `__.
+    """
+
+    database: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    statements: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    operation_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    proto_descriptors: bytes = proto.Field(
+        proto.BYTES,
+        number=4,
+    )
+    throughput_mode: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+
+
+class DdlStatementActionInfo(proto.Message):
+    r"""Action information extracted from a DDL statement. This proto is
+    used to display the brief info of the DDL statement for the
+    operation
+    [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl].
+
+    Attributes:
+        action (str):
+            The action for the DDL statement, e.g.
+            CREATE, ALTER, DROP, GRANT, etc. This field is a
+            non-empty string.
+        entity_type (str):
+            The entity type for the DDL statement, e.g. TABLE, INDEX,
+            VIEW, etc. This field can be an empty string for some DDL
+            statements, e.g. for the statement "ANALYZE",
+            ``entity_type`` = "".
+        entity_names (MutableSequence[str]):
+            The entity name(s) being operated on by the DDL statement.
+            E.g.
+
+            1. For statement "CREATE TABLE t1(...)", ``entity_names`` =
+               ["t1"].
+            2. For statement "GRANT ROLE r1, r2 ...", ``entity_names`` =
+               ["r1", "r2"].
+            3. For statement "ANALYZE", ``entity_names`` = [].
+    """
+
+    action: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    entity_type: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    entity_names: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class UpdateDatabaseDdlMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl].
+
+    Attributes:
+        database (str):
+            The database being modified.
+        statements (MutableSequence[str]):
+            For an update, this list contains all the
+            statements. For an individual statement, this
+            list contains only that statement.
+        commit_timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]):
+            Reports the commit timestamps of all statements that have
+            succeeded so far, where ``commit_timestamps[i]`` is the
+            commit timestamp for the statement ``statements[i]``.
+        throttled (bool):
+            Output only. When true, indicates that the
+            operation is throttled, e.g. due to resource
+            constraints. When resources become available, the
+            operation will resume and this field will be
+            false again.
+        progress (MutableSequence[google.cloud.spanner_admin_database_v1.types.OperationProgress]):
+            The progress of the
+            [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]
+            operations.
All DDL statements will have continuously + updating progress, and ``progress[i]`` is the operation + progress for ``statements[i]``. Also, ``progress[i]`` will + have start time and end time populated with commit timestamp + of operation, as well as a progress of 100% once the + operation has completed. + actions (MutableSequence[google.cloud.spanner_admin_database_v1.types.DdlStatementActionInfo]): + The brief action info for the DDL statements. ``actions[i]`` + is the brief info for ``statements[i]``. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + statements: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + commit_timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + throttled: bool = proto.Field( + proto.BOOL, + number=4, + ) + progress: MutableSequence[common.OperationProgress] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=common.OperationProgress, + ) + actions: MutableSequence['DdlStatementActionInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='DdlStatementActionInfo', + ) + + +class DropDatabaseRequest(proto.Message): + r"""The request for + [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. + + Attributes: + database (str): + Required. The database to be dropped. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetDatabaseDdlRequest(proto.Message): + r"""The request for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + Attributes: + database (str): + Required. The database whose schema we wish to get. Values + are of the form + ``projects//instances//databases/`` + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetDatabaseDdlResponse(proto.Message): + r"""The response for + [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. + + Attributes: + statements (MutableSequence[str]): + A list of formatted DDL statements defining + the schema of the database specified in the + request. + proto_descriptors (bytes): + Proto descriptors stored in the database. Contains a + protobuf-serialized + `google.protobuf.FileDescriptorSet `__. + For more details, see protobuffer `self + description `__. + """ + + statements: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + proto_descriptors: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class ListDatabaseOperationsRequest(proto.Message): + r"""The request for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Attributes: + parent (str): + Required. The instance of the database operations. Values + are of the form ``projects//instances/``. + filter (str): + An expression that filters the list of returned operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the + [Operation][google.longrunning.Operation] are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. 
For example, + the type string for + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata] + is + ``type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first, if filtering + on metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic. However, you can specify AND, OR, + and NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. + - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND`` + ``(metadata.source_type:BACKUP) AND`` + ``(metadata.backup_info.backup:backup_howl) AND`` + ``(metadata.name:restored_howl) AND`` + ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Return operations where: + + - The operation's metadata type is + [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. + - The database is restored from a backup. + - The backup name contains "backup_howl". + - The restored database's name contains "restored_howl". + - The operation started before 2018-03-28T14:50:00Z. + - The operation resulted in an error. + page_size (int): + Number of operations to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListDatabaseOperationsResponse.next_page_token] + from a previous + [ListDatabaseOperationsResponse][google.spanner.admin.database.v1.ListDatabaseOperationsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListDatabaseOperationsResponse(proto.Message): + r"""The response for + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. + + Attributes: + operations (MutableSequence[google.longrunning.operations_pb2.Operation]): + The list of matching database [long-running + operations][google.longrunning.Operation]. Each operation's + name will be prefixed by the database's name. The + operation's + [metadata][google.longrunning.Operation.metadata] field type + ``metadata.type_url`` describes the type of the metadata. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations] + call to fetch more of the matching metadata. + """ + + @property + def raw_page(self): + return self + + operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RestoreDatabaseRequest(proto.Message): + r"""The request for + [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The name of the instance in which to create the + restored database. This instance must be in the same project + and have the same instance configuration as the instance + containing the source backup. Values are of the form + ``projects//instances/``. + database_id (str): + Required. The id of the database to create and restore to. + This database must not already exist. The ``database_id`` + appended to ``parent`` forms the full database name of the + form + ``projects//instances//databases/``. + backup (str): + Name of the backup from which to restore. Values are of the + form + ``projects//instances//backups/``. + + This field is a member of `oneof`_ ``source``. + encryption_config (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig): + Optional. An encryption configuration describing the + encryption type and key resources in Cloud KMS used to + encrypt/decrypt the database to restore to. If this field is + not specified, the restored database will use the same + encryption configuration as the backup by default, namely + [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] + = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database_id: str = proto.Field( + proto.STRING, + number=2, + ) + backup: str = proto.Field( + proto.STRING, + number=3, + oneof='source', + ) + encryption_config: 'RestoreDatabaseEncryptionConfig' = proto.Field( + proto.MESSAGE, + number=4, + message='RestoreDatabaseEncryptionConfig', + ) + + +class RestoreDatabaseEncryptionConfig(proto.Message): + r"""Encryption configuration for the restored database. + + Attributes: + encryption_type (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig.EncryptionType): + Required. The encryption type of the restored + database. + kms_key_name (str): + Optional. The Cloud KMS key that will be used to + encrypt/decrypt the restored database. This field should be + set only when + [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] + is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form + ``projects//locations//keyRings//cryptoKeys/``. + kms_key_names (MutableSequence[str]): + Optional. Specifies the KMS configuration for the one or + more keys used to encrypt the database. Values are of the + form + ``projects//locations//keyRings//cryptoKeys/``. + + The keys referenced by kms_key_names must fully cover all + regions of the database instance configuration. Some + examples: + + - For single region database instance configs, specify a + single regional location KMS key. + - For multi-regional database instance configs of type + GOOGLE_MANAGED, either specify a multi-regional location + KMS key or multiple regional location KMS keys that cover + all regions in the instance config. + - For a database instance config of type USER_MANAGED, + please specify only regional location KMS keys to cover + each region in the instance config. Multi-regional + location KMS keys are not supported for USER_MANAGED + instance configs. + """ + class EncryptionType(proto.Enum): + r"""Encryption types for the database to be restored. + + Values: + ENCRYPTION_TYPE_UNSPECIFIED (0): + Unspecified. Do not use. 
+            USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1):
+                This is the default option when
+                [encryption_config][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig]
+                is not specified.
+            GOOGLE_DEFAULT_ENCRYPTION (2):
+                Use Google default encryption.
+            CUSTOMER_MANAGED_ENCRYPTION (3):
+                Use customer managed encryption. If specified,
+                ``kms_key_name`` must contain a valid Cloud KMS key.
+        """
+        ENCRYPTION_TYPE_UNSPECIFIED = 0
+        USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1
+        GOOGLE_DEFAULT_ENCRYPTION = 2
+        CUSTOMER_MANAGED_ENCRYPTION = 3
+
+    encryption_type: EncryptionType = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum=EncryptionType,
+    )
+    kms_key_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    kms_key_names: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class RestoreDatabaseMetadata(proto.Message):
+    r"""Metadata type for the long-running operation returned by
+    [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].
+
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Name of the database being created and
+            restored to.
+        source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType):
+            The type of the restore source.
+        backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo):
+            Information about the backup used to restore
+            the database.
+
+            This field is a member of `oneof`_ ``source_info``.
+        progress (google.cloud.spanner_admin_database_v1.types.OperationProgress):
+            The progress of the
+            [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]
+            operation.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which cancellation of this operation was
+            received.
+            [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]
+            starts asynchronous cancellation on a long-running
+            operation. The server makes a best effort to cancel the
+            operation, but success is not guaranteed. Clients can use
+            [Operations.GetOperation][google.longrunning.Operations.GetOperation]
+            or other methods to check whether the cancellation succeeded
+            or whether the operation completed despite cancellation. On
+            successful cancellation, the operation is not deleted;
+            instead, it becomes an operation with an
+            [Operation.error][google.longrunning.Operation.error] value
+            with a [google.rpc.Status.code][google.rpc.Status.code] of
+            1, corresponding to ``Code.CANCELLED``.
+        optimize_database_operation_name (str):
+            If exists, the name of the long-running operation that will
+            be used to track the post-restore optimization process to
+            optimize the performance of the restored database, and
+            remove the dependency on the restore source. The name is of
+            the form
+            ``projects//instances//databases//operations/``
+            where the database segment is the name of the database being
+            created and restored to. The metadata type of the
+            long-running operation is
+            [OptimizeRestoredDatabaseMetadata][google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata].
+            This long-running operation will be automatically created by
+            the system after the RestoreDatabase long-running operation
+            completes successfully. This operation will not be created
+            if the restore was not successful.
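+
+    Example:
+        A minimal sketch of reading this metadata from a restore
+        operation; the ``client`` and ``request`` objects are
+        illustrative placeholders, not part of this message::
+
+            operation = client.restore_database(request=request)
+            operation.result()  # wait for the restore itself to finish
+
+            # The deserialized RestoreDatabaseMetadata proto.
+            metadata = operation.metadata
+            if metadata.optimize_database_operation_name:
+                # A separate long-running operation tracks the
+                # post-restore optimization.
+                print(metadata.optimize_database_operation_name)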
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source_type: 'RestoreSourceType' = proto.Field( + proto.ENUM, + number=2, + enum='RestoreSourceType', + ) + backup_info: gsad_backup.BackupInfo = proto.Field( + proto.MESSAGE, + number=3, + oneof='source_info', + message=gsad_backup.BackupInfo, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=4, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + optimize_database_operation_name: str = proto.Field( + proto.STRING, + number=6, + ) + + +class OptimizeRestoredDatabaseMetadata(proto.Message): + r"""Metadata type for the long-running operation used to track + the progress of optimizations performed on a newly restored + database. This long-running operation is automatically created + by the system after the successful completion of a database + restore, and cannot be cancelled. + + Attributes: + name (str): + Name of the restored database being + optimized. + progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): + The progress of the post-restore + optimizations. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=2, + message=common.OperationProgress, + ) + + +class DatabaseRole(proto.Message): + r"""A Cloud Spanner database role. + + Attributes: + name (str): + Required. The name of the database role. Values are of the + form + ``projects//instances//databases//databaseRoles/`` + where ```` is as specified in the ``CREATE ROLE`` DDL + statement. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDatabaseRolesRequest(proto.Message): + r"""The request for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + + Attributes: + parent (str): + Required. The database whose roles should be listed. Values + are of the form + ``projects//instances//databases/``. + page_size (int): + Number of database roles to be returned in + the response. If 0 or less, defaults to the + server's maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.database.v1.ListDatabaseRolesResponse.next_page_token] + from a previous + [ListDatabaseRolesResponse][google.spanner.admin.database.v1.ListDatabaseRolesResponse]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDatabaseRolesResponse(proto.Message): + r"""The response for + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. + + Attributes: + database_roles (MutableSequence[google.cloud.spanner_admin_database_v1.types.DatabaseRole]): + Database roles that matched the request. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles] + call to fetch more of the matching roles. 
+ """ + + @property + def raw_page(self): + return self + + database_roles: MutableSequence['DatabaseRole'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DatabaseRole', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AddSplitPointsRequest(proto.Message): + r"""The request for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + + Attributes: + database (str): + Required. The database on whose tables/indexes split points + are to be added. Values are of the form + ``projects//instances//databases/``. + split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]): + Required. The split points to add. + initiator (str): + Optional. A user-supplied tag associated with the split + points. For example, "intital_data_load", "special_event_1". + Defaults to "CloudAddSplitPointsAPI" if not specified. The + length of the tag must not exceed 50 characters,else will be + trimmed. Only valid UTF8 characters are allowed. + """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + split_points: MutableSequence['SplitPoints'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='SplitPoints', + ) + initiator: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AddSplitPointsResponse(proto.Message): + r"""The response for + [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. + + """ + + +class SplitPoints(proto.Message): + r"""The split points of a table/index. + + Attributes: + table (str): + The table to split. + index (str): + The index to split. If specified, the ``table`` field must + refer to the index's base table. + keys (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints.Key]): + Required. The list of split keys, i.e., the + split boundaries. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The expiration timestamp of the + split points. A timestamp in the past means + immediate expiration. The maximum value can be + 30 days in the future. Defaults to 10 days in + the future if not specified. + """ + + class Key(proto.Message): + r"""A split key. + + Attributes: + key_parts (google.protobuf.struct_pb2.ListValue): + Required. The column values making up the + split key. + """ + + key_parts: struct_pb2.ListValue = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.ListValue, + ) + + table: str = proto.Field( + proto.STRING, + number=1, + ) + index: str = proto.Field( + proto.STRING, + number=2, + ) + keys: MutableSequence[Key] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Key, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class InternalUpdateGraphOperationRequest(proto.Message): + r"""Internal request proto, do not use directly. + + Attributes: + database (str): + Internal field, do not use directly. + operation_id (str): + Internal field, do not use directly. + vm_identity_token (str): + Internal field, do not use directly. + progress (float): + Internal field, do not use directly. + status (google.rpc.status_pb2.Status): + Internal field, do not use directly. 
+ """ + + database: str = proto.Field( + proto.STRING, + number=1, + ) + operation_id: str = proto.Field( + proto.STRING, + number=2, + ) + vm_identity_token: str = proto.Field( + proto.STRING, + number=5, + ) + progress: float = proto.Field( + proto.DOUBLE, + number=3, + ) + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=6, + message=status_pb2.Status, + ) + + +class InternalUpdateGraphOperationResponse(proto.Message): + r"""Internal response proto, do not use directly. + """ + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/mypy.ini b/owl-bot-staging/spanner_admin_database/v1/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_database/v1/noxfile.py b/owl-bot-staging/spanner_admin_database/v1/noxfile.py new file mode 100644 index 0000000000..feed1a3ac8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/noxfile.py @@ -0,0 +1,601 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] + +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-spanner-admin-database" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + # 
TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=PREVIEW_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+    with open(
+        CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
+    constraints_deps = [
+        match.group(1)
+        for match in re.finditer(
+            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+        )
+    ]
+
+    # Install dependencies specified in `testing/constraints-X.txt`.
+    session.install(*constraints_deps)
+
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and
+    # `grpcio-status` should be added to the list below so that they are installed from source,
+    # rather than PyPI.
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be
+    # added to the list below so that it is installed from source, rather than PyPI.
+    # Note: If a dependency is added to the `core_dependencies_from_source` list,
+    # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated.
+    core_dependencies_from_source = [
+        "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos",
+        "google-api-core @ git+https://github.com/googleapis/python-api-core.git",
+        "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git",
+        "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1",
+        "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git",
+    ]
+
+    for dep in core_dependencies_from_source:
+        session.install(dep, "--no-deps", "--ignore-installed")
+        print(f"Installed {dep}")
+
+    session.run(
+        "py.test",
+        "tests/unit",
+        env={
+            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+        },
+    )
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
new file mode 100644
index 0000000000..e89008727d
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
@@ -0,0 +1,4480 @@
+{ + "clientLibrary": { + "apis": [ + { + "id": "google.spanner.admin.database.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-spanner-admin-database", + "version": "0.0.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.add_split_points", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "AddSplitPoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "split_points", + "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", +
"type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", + "shortName": "add_split_points" + }, + "description": "Sample for AddSplitPoints", + "file": "spanner_v1_generated_database_admin_add_split_points_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_add_split_points_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.add_split_points", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "AddSplitPoints" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "split_points", + "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", + "shortName": "add_split_points" + }, + "description": "Sample for AddSplitPoints", + "file": "spanner_v1_generated_database_admin_add_split_points_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_add_split_points_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.copy_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CopyBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": 
"source_backup", + "type": "str" + }, + { + "name": "expire_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "copy_backup" + }, + "description": "Sample for CopyBackup", + "file": "spanner_v1_generated_database_admin_copy_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_copy_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.copy_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CopyBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "source_backup", + "type": "str" + }, + { + "name": "expire_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "copy_backup" + }, + "description": "Sample for CopyBackup", + "file": "spanner_v1_generated_database_admin_copy_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_copy_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": 
"CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "backup_schedule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "spanner_v1_generated_database_admin_create_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "backup_schedule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "create_backup_schedule" + }, + "description": "Sample for CreateBackupSchedule", + "file": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "spanner_v1_generated_database_admin_create_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "backup_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_backup" + }, + "description": "Sample for CreateBackup", + "file": "spanner_v1_generated_database_admin_create_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "spanner_v1_generated_database_admin_create_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "create_statement", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "spanner_v1_generated_database_admin_create_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "create_statement", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "spanner_v1_generated_database_admin_create_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_create_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup_schedule" + }, + "description": "Sample for DeleteBackupSchedule", + "file": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": 
{ + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "spanner_v1_generated_database_admin_delete_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DeleteBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_backup" + }, + "description": "Sample for DeleteBackup", + "file": "spanner_v1_generated_database_admin_delete_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_delete_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.drop_database", + "method": { + "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DropDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "drop_database" + }, + "description": "Sample for DropDatabase", + "file": "spanner_v1_generated_database_admin_drop_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_drop_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.drop_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "DropDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "drop_database" + }, + "description": "Sample for DropDatabase", + "file": "spanner_v1_generated_database_admin_drop_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_drop_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" + }, + "description": "Sample for GetBackupSchedule", + "file": "spanner_v1_generated_database_admin_get_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "get_backup_schedule" + }, + "description": "Sample for GetBackupSchedule", + "file": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "spanner_v1_generated_database_admin_get_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "get_backup" + }, + "description": "Sample for GetBackup", + "file": "spanner_v1_generated_database_admin_get_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", + "shortName": "get_database_ddl" + }, + "description": "Sample for GetDatabaseDdl", + "file": "spanner_v1_generated_database_admin_get_database_ddl_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_ddl_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", + "shortName": "get_database_ddl" + }, + "description": "Sample for GetDatabaseDdl", + "file": "spanner_v1_generated_database_admin_get_database_ddl_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_ddl_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "spanner_v1_generated_database_admin_get_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Database", + "shortName": "get_database" + }, + "description": "Sample for GetDatabase", + "file": "spanner_v1_generated_database_admin_get_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_database_admin_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_database_admin_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.internal_update_graph_operation", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "InternalUpdateGraphOperation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "operation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", + "shortName": "internal_update_graph_operation" + }, + "description": "Sample for InternalUpdateGraphOperation", + "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.internal_update_graph_operation", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "InternalUpdateGraphOperation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "operation_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", + "shortName": "internal_update_graph_operation" + }, + "description": "Sample for InternalUpdateGraphOperation", + "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_operations", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupOperations" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager", + "shortName": "list_backup_operations" + }, + "description": "Sample for ListBackupOperations", + "file": "spanner_v1_generated_database_admin_list_backup_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_operations", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager", + "shortName": "list_backup_operations" + }, + "description": "Sample for ListBackupOperations", + "file": "spanner_v1_generated_database_admin_list_backup_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_schedules", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", + "service": { + "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupSchedules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager", + "shortName": "list_backup_schedules" + }, + "description": "Sample for ListBackupSchedules", + "file": "spanner_v1_generated_database_admin_list_backup_schedules_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_schedules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_schedules", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackupSchedules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager", + "shortName": "list_backup_schedules" + }, + "description": "Sample for ListBackupSchedules", + "file": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backups", + 
"method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "spanner_v1_generated_database_admin_list_backups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backups", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "spanner_v1_generated_database_admin_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_operations", + "method": { + "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager", + "shortName": "list_database_operations" + }, + "description": "Sample for ListDatabaseOperations", + "file": "spanner_v1_generated_database_admin_list_database_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_operations", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager", + "shortName": "list_database_operations" + }, + "description": "Sample for ListDatabaseOperations", + "file": "spanner_v1_generated_database_admin_list_database_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_roles", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseRoles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager", + "shortName": "list_database_roles" + }, + "description": "Sample for ListDatabaseRoles", + "file": "spanner_v1_generated_database_admin_list_database_roles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_roles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_roles", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabaseRoles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager", + "shortName": "list_database_roles" + }, + "description": "Sample for ListDatabaseRoles", + "file": "spanner_v1_generated_database_admin_list_database_roles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_database_roles_sync.py" + }, + { 
+ "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_databases", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "spanner_v1_generated_database_admin_list_databases_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_databases_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_databases", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "ListDatabases" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager", + "shortName": "list_databases" + }, + "description": "Sample for ListDatabases", + "file": "spanner_v1_generated_database_admin_list_databases_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_list_databases_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.restore_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "RestoreDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "backup", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_database" + }, + "description": "Sample for RestoreDatabase", + "file": "spanner_v1_generated_database_admin_restore_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_restore_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.restore_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "RestoreDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "backup", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_database" + }, + "description": "Sample for RestoreDatabase", + "file": "spanner_v1_generated_database_admin_restore_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, 
+ "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_restore_database_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_database_admin_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_database_admin_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_set_iam_policy_sync.py" + }, + { + "canonical": true, + 
"clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_database_admin_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "spanner_v1_generated_database_admin_update_backup_schedule_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_schedule_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup_schedule", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackupSchedule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" + }, + { + "name": "backup_schedule", + "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", + "shortName": "update_backup_schedule" + }, + "description": "Sample for UpdateBackupSchedule", + "file": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync", + 
"segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": "spanner_v1_generated_database_admin_update_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" + }, + { + "name": "backup", + "type": "google.cloud.spanner_admin_database_v1.types.Backup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", + "shortName": "update_backup" + }, + "description": "Sample for UpdateBackup", + "file": 
"spanner_v1_generated_database_admin_update_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "statements", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_database_ddl" + }, + "description": "Sample for UpdateDatabaseDdl", + "file": "spanner_v1_generated_database_admin_update_database_ddl_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_ddl_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database_ddl", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabaseDdl" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" + }, + { + "name": "database", + "type": "str" + }, + { + "name": "statements", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + 
"shortName": "update_database_ddl" + }, + "description": "Sample for UpdateDatabaseDdl", + "file": "spanner_v1_generated_database_admin_update_database_ddl_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_ddl_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", + "shortName": "DatabaseAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.spanner_admin_database_v1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "spanner_v1_generated_database_admin_update_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", + "shortName": "DatabaseAdminClient" + }, + "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database", + "method": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", + "service": { + "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", + "shortName": "DatabaseAdmin" + }, + "shortName": "UpdateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" + }, + { + "name": "database", + "type": "google.cloud.spanner_admin_database_v1.types.Database" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_database" + }, + "description": "Sample for UpdateDatabase", + "file": "spanner_v1_generated_database_admin_update_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_database_admin_update_database_sync.py" + } + ] +} diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py new file mode 100644 index 0000000000..ff6fcfe598 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddSplitPoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_add_split_points(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.AddSplitPointsRequest( + database="database_value", + ) + + # Make the request + response = await client.add_split_points(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py new file mode 100644 index 0000000000..3819bbe986 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AddSplitPoints +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_add_split_points(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.AddSplitPointsRequest( + database="database_value", + ) + + # Make the request + response = client.add_split_points(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py new file mode 100644 index 0000000000..d885947bb5 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_copy_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + # Make the request + operation = await client.copy_backup(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py new file mode 100644 index 0000000000..a571e058c9 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_copy_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CopyBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + source_backup="source_backup_value", + ) + + # Make the request + operation = client.copy_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py new file mode 100644 index 0000000000..2ad8881f54 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_create_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = await client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py new file mode 100644 index 0000000000..efdcc2457e --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule
# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = await client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py new file mode 100644 index 0000000000..60d4b50c3b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_create_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupScheduleRequest( + parent="parent_value", + backup_schedule_id="backup_schedule_id_value", + ) + + # Make the request + response = client.create_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py new file mode 100644 index 0000000000..02b9d1f0e7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_create_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateBackupRequest( + parent="parent_value", + backup_id="backup_id_value", + ) + + # Make the request + operation = client.create_backup(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py new file mode 100644 index 0000000000..47399a8d40 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_create_database():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.CreateDatabaseRequest(
+        parent="parent_value",
+        create_statement="create_statement_value",
+    )

+    # Make the request (the async client method must be awaited to get the operation)
+    operation = await client.create_database(request=request)

+    print("Waiting for operation to complete...")

+    response = await operation.result()

+    # Handle the response
+    print(response)

+# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py
new file mode 100644
index 0000000000..6f112cd8a7
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateDatabase
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_create_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.CreateDatabaseRequest( + parent="parent_value", + create_statement="create_statement_value", + ) + + # Make the request + operation = client.create_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py new file mode 100644 index 0000000000..ab10785105 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_delete_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py new file mode 100644 index 0000000000..591d45cb10 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + await client.delete_backup_schedule(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py new file mode 100644 index 0000000000..720417ba65 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_delete_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupScheduleRequest( + name="name_value", + ) + + # Make the request + client.delete_backup_schedule(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py new file mode 100644 index 0000000000..736dc56a23 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_delete_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DeleteBackupRequest( + name="name_value", + ) + + # Make the request + client.delete_backup(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py new file mode 100644 index 0000000000..15f279b72d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DropDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + await client.drop_database(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py new file mode 100644 index 0000000000..f218cabd83 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DropDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_drop_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.DropDatabaseRequest( + database="database_value", + ) + + # Make the request + client.drop_database(request=request) + + +# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py new file mode 100644 index 0000000000..58b93a119a --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py new file mode 100644 index 0000000000..5a37eec975 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py new file mode 100644 index 0000000000..4006cac333 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupScheduleRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py new file mode 100644 index 0000000000..16cffcd78d --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetBackupRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py new file mode 100644 index 0000000000..fd8621c27b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = await client.get_database(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py new file mode 100644 index 0000000000..8e84b21f78 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabaseDdl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = await client.get_database_ddl(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py new file mode 100644 index 0000000000..495b557a55 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabaseDdl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseDdlRequest( + database="database_value", + ) + + # Make the request + response = client.get_database_ddl(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py new file mode 100644 index 0000000000..ab729bb9e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_get_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.GetDatabaseRequest( + name="name_value", + ) + + # Make the request + response = client.get_database(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py new file mode 100644 index 0000000000..d5d75de78b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py new file mode 100644 index 0000000000..75e0b48b1b --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py new file mode 100644 index 0000000000..556205a0aa --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InternalUpdateGraphOperation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = await client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py new file mode 100644 index 0000000000..46f1a3c88f --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InternalUpdateGraphOperation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_internal_update_graph_operation(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( + database="database_value", + operation_id="operation_id_value", + vm_identity_token="vm_identity_token_value", + ) + + # Make the request + response = client.internal_update_graph_operation(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py new file mode 100644 index 0000000000..a56ec9f80e --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_backup_operations():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListBackupOperationsRequest(
+        parent="parent_value",
+    )

+    # Make the request (await the coroutine to obtain the async pager)
+    page_result = await client.list_backup_operations(request=request)

+    # Handle the response
+    async for response in page_result:
+        print(response)

+# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py
new file mode 100644
index 0000000000..6383e1b247
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackupOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_backup_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py new file mode 100644 index 0000000000..25ac53891a --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupSchedules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_database_v1


+async def sample_list_backup_schedules():
+    # Create a client
+    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

+    # Initialize request argument(s)
+    request = spanner_admin_database_v1.ListBackupSchedulesRequest(
+        parent="parent_value",
+    )

+    # Make the request (await the coroutine to obtain the async pager)
+    page_result = await client.list_backup_schedules(request=request)

+    # Handle the response
+    async for response in page_result:
+        print(response)

+# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py
new file mode 100644
index 0000000000..89cf82d278
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListBackupSchedules
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-database


+# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_backup_schedules(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupSchedulesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_schedules(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py new file mode 100644 index 0000000000..140e519e07 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
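+#   As a hedged illustration (IDs are hypothetical), an in-range parent for
+#   ListBackups is the instance that owns the backups, for example:
+#       parent="projects/my-project/instances/my-instance"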
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_list_backups(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackups_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py new file mode 100644 index 0000000000..9f04036f74 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListBackups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_backups(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListBackupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListBackups_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py new file mode 100644 index 0000000000..3bc614b232 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabaseOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
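+#   As a hedged illustration (IDs are hypothetical), parent here is the
+#   instance whose long-running database operations are listed, for example:
+#       parent="projects/my-project/instances/my-instance"
+#   The request also accepts an optional filter to narrow the result set.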
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_list_database_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py new file mode 100644 index 0000000000..3d4dc965a9 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabaseOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_database_operations(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py new file mode 100644 index 0000000000..46ec91ce89 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabaseRoles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
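+#   As a hedged illustration (IDs are hypothetical), parent for
+#   ListDatabaseRoles is the database whose roles are listed, for example:
+#       parent="projects/my-project/instances/my-instance/databases/my-database"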
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_list_database_roles(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseRolesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_roles(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py new file mode 100644 index 0000000000..d39e4759dd --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabaseRoles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_database_roles(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabaseRolesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_database_roles(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py new file mode 100644 index 0000000000..586dfa56f1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
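+#   As a hedged illustration (IDs are hypothetical), parent for
+#   ListDatabases is an instance, for example:
+#       parent="projects/my-project/instances/my-instance"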
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_list_databases(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py new file mode 100644 index 0000000000..e6ef221af6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDatabases +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_list_databases(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.ListDatabasesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_databases(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py new file mode 100644 index 0000000000..384c063c61 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
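+#   As a hedged illustration (IDs are hypothetical): parent is the instance
+#   to restore into, database_id names the database to be created, and
+#   backup is a full backup resource name, for example:
+#       parent="projects/my-project/instances/my-instance"
+#       database_id="restored-db"
+#       backup="projects/my-project/instances/my-instance/backups/my-backup"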
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_restore_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.RestoreDatabaseRequest( + backup="backup_value", + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.restore_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py new file mode 100644 index 0000000000..a327a8ae13 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RestoreDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_restore_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.RestoreDatabaseRequest( + backup="backup_value", + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + operation = client.restore_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py new file mode 100644 index 0000000000..edade4c950 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
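+#   As a hedged illustration (names are hypothetical), resource is a
+#   database or backup resource name, and a realistic request also carries
+#   a policy; a minimal sketch of one binding:
+#       from google.iam.v1 import policy_pb2  # type: ignore
+#       policy = policy_pb2.Policy(bindings=[policy_pb2.Binding(
+#           role="roles/spanner.databaseReader",
+#           members=["user:jane@example.com"],
+#       )])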
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py new file mode 100644 index 0000000000..28a6746f4a --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py new file mode 100644 index 0000000000..0e6ea91cb3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
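+#   As a hedged illustration, real permission strings use dotted names such
+#   as "spanner.databases.get" or "spanner.backups.list"; the
+#   'permissions_valueN' entries in the sample body are placeholders only.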
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py new file mode 100644 index 0000000000..3fd0316dc1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py new file mode 100644 index 0000000000..95fa2a63f6 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
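+#   A hedged sketch of a more realistic request (field choice, names, and
+#   dict-style initialization are assumptions about a typical use, not part
+#   of the generated sample):
+#       import datetime
+#       request = spanner_admin_database_v1.UpdateBackupRequest(
+#           backup={"name": "projects/my-project/instances/my-instance/backups/my-backup",
+#                   "expire_time": datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc)},
+#           update_mask={"paths": ["expire_time"]},
+#       )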
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = await client.update_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py new file mode 100644 index 0000000000..de17dfc86e --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
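+#   As a hedged note: a realistic request typically carries the existing
+#   schedule (with its resource name) plus an update_mask listing the
+#   fields being changed; the empty request in the sample body is a
+#   placeholder only.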
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = await client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py new file mode 100644 index 0000000000..4ef64a0673 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupSchedule +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_backup_schedule(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupScheduleRequest( + ) + + # Make the request + response = client.update_backup_schedule(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py new file mode 100644 index 0000000000..9dbb0148dc --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_backup(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateBackupRequest( + ) + + # Make the request + response = client.update_backup(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py new file mode 100644 index 0000000000..d5588c3036 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
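+#   As a hedged note (field name is an assumption about a typical use):
+#   UpdateDatabase pairs the database with an update_mask for the fields
+#   being changed, e.g.:
+#       update_mask={"paths": ["enable_drop_protection"]}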
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_update_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + database = spanner_admin_database_v1.Database() + database.name = "name_value" + + request = spanner_admin_database_v1.UpdateDatabaseRequest( + database=database, + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py new file mode 100644 index 0000000000..ad98e2da9c --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabaseDdl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
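+#   As a hedged illustration, statements carry real DDL rather than the
+#   placeholders below, for example:
+#       statements=["CREATE TABLE Singers (SingerId INT64) PRIMARY KEY (SingerId)"]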
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +async def sample_update_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database="database_value", + statements=['statements_value1', 'statements_value2'], + ) + + # Make the request + operation = client.update_database_ddl(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py new file mode 100644 index 0000000000..73297524b9 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabaseDdl +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_database_ddl(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( + database="database_value", + statements=['statements_value1', 'statements_value2'], + ) + + # Make the request + operation = client.update_database_ddl(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py new file mode 100644 index 0000000000..62ed40bc84 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-database + + +# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_database_v1 + + +def sample_update_database(): + # Create a client + client = spanner_admin_database_v1.DatabaseAdminClient() + + # Initialize request argument(s) + database = spanner_admin_database_v1.Database() + database.name = "name_value" + + request = spanner_admin_database_v1.UpdateDatabaseRequest( + database=database, + ) + + # Make the request + operation = client.update_database(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py new file mode 100644 index 0000000000..d642e9a0e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py @@ -0,0 +1,202 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class spanner_admin_databaseCallTransformer(cst.CSTTransformer):
+    # Note: these are variable-length tuples, so they are annotated
+    # Tuple[str, ...]; a bare Tuple[str] would mean a 1-tuple.
+    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
+        'add_split_points': ('database', 'split_points', 'initiator', ),
+        'copy_backup': ('parent', 'backup_id', 'source_backup', 'expire_time', 'encryption_config', ),
+        'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ),
+        'create_backup_schedule': ('parent', 'backup_schedule_id', 'backup_schedule', ),
+        'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', 'database_dialect', 'proto_descriptors', ),
+        'delete_backup': ('name', ),
+        'delete_backup_schedule': ('name', ),
+        'drop_database': ('database', ),
+        'get_backup': ('name', ),
+        'get_backup_schedule': ('name', ),
+        'get_database': ('name', ),
+        'get_database_ddl': ('database', ),
+        'get_iam_policy': ('resource', 'options', ),
+        'internal_update_graph_operation': ('database', 'operation_id', 'vm_identity_token', 'progress', 'status', ),
+        'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ),
+        'list_backups': ('parent', 'filter', 'page_size', 'page_token', ),
+        'list_backup_schedules': ('parent', 'page_size', 'page_token', ),
+        'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ),
+        'list_database_roles': ('parent', 'page_size', 'page_token', ),
+        'list_databases': ('parent', 'page_size', 'page_token', ),
+        'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', ),
+        'set_iam_policy': ('resource', 'policy', 'update_mask', ),
+        'test_iam_permissions': ('resource', 'permissions', ),
+        'update_backup': ('backup', 'update_mask', ),
+        'update_backup_schedule': ('backup_schedule', 'update_mask', ),
+        'update_database': ('database', 'update_mask', ),
+        'update_database_ddl': ('database', 'statements', 'operation_id', 'proto_descriptors', 'throughput_mode', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=spanner_admin_databaseCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the spanner_admin_database client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool makes a best-effort attempt at converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
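+
+      As a purely illustrative example (the variable names are hypothetical,
+      but the parameter mapping comes from METHOD_TO_PARAMS above), a
+      flattened call such as
+
+          client.create_database(parent, create_statement)
+
+      is rewritten to
+
+          client.create_database(request={'parent': parent, 'create_statement': create_statement})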
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_database/v1/setup.py b/owl-bot-staging/spanner_admin_database/v1/setup.py new file mode 100644 index 0000000000..64017e80e7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/setup.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-spanner-admin-database' + + +description = "Google Cloud Spanner Admin Database API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/spanner_admin_database/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-database" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") 
+] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt new file mode 100644 index 0000000000..2ae5a677e8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# then this file should have google-cloud-foo>=1
+google-api-core>=2
+google-auth>=2
+grpcio>=1
+proto-plus>=1
+protobuf>=6
+grpc-google-iam-v1>=0
diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt
new file mode 100644
index 0000000000..2ae5a677e8
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt
@@ -0,0 +1,13 @@
+# We use the constraints file for the latest Python version
+# (currently this file) to check that the latest
+# major versions of dependencies are supported in setup.py.
+# List all library dependencies and extras in this file.
+# Require the latest major version be installed for each dependency.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# then this file should have google-cloud-foo>=1
+google-api-core>=2
+google-auth>=2
+grpcio>=1
+proto-plus>=1
+protobuf>=6
+grpc-google-iam-v1>=0
diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt
new file mode 100644
index 0000000000..5b1ee6c35a
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt
@@ -0,0 +1,13 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List all library dependencies and extras in this file.
+# Pin the version to the lower bound.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# then this file should have google-cloud-foo==1.14.0
+google-api-core==1.34.1
+google-auth==2.14.1
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453)
+# Add the minimum supported version of grpcio to constraints files
+proto-plus==1.22.3
+protobuf==3.20.2
+grpc-google-iam-v1==0.14.0
diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt
new file mode 100644
index 0000000000..ef1c92ffff
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+google-auth
+grpcio
+proto-plus
+protobuf
+grpc-google-iam-v1
diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt
new file mode 100644
index 0000000000..ef1c92ffff
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+google-auth
+grpcio
+proto-plus
+protobuf
+grpc-google-iam-v1
diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py
new file mode 100644
index 0000000000..191773d557
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py
@@ -0,0 +1,16 @@
+
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
new file mode 100644
index 0000000000..0ba71c42e0
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
@@ -0,0 +1,22226 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import re
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation
+from google.api_core import operation_async # type: ignore
+from google.api_core import operations_v1
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminAsyncClient
+from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminClient
+from google.cloud.spanner_admin_database_v1.services.database_admin import pagers
+from google.cloud.spanner_admin_database_v1.services.database_admin import transports
+from google.cloud.spanner_admin_database_v1.types import backup
+from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
+from google.cloud.spanner_admin_database_v1.types import backup_schedule
+from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule
+from google.cloud.spanner_admin_database_v1.types import common
+from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import options_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import any_pb2 # type: ignore
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import field_mask_pb2 # type: ignore
+from google.protobuf import struct_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
+from google.type import expr_pb2 # type: ignore
+import google.auth
+
+
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+    # Step by chunk_size so successive chunks do not overlap.
+    for i in range(0, len(data), chunk_size): # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DatabaseAdminClient._get_default_mtls_endpoint(None) is None + assert DatabaseAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DatabaseAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DatabaseAdminClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + DatabaseAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DatabaseAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DatabaseAdminClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DatabaseAdminClient._get_client_cert_source(None, False) is None + assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert DatabaseAdminClient._get_client_cert_source(None, True) is mock_default_cert_source + assert 
DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert DatabaseAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT + assert DatabaseAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert DatabaseAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert DatabaseAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert DatabaseAdminClient._get_universe_domain(None, None) == DatabaseAdminClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + DatabaseAdminClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
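+
+# Illustrative sketch of the precedence exercised above (hypothetical helper;
+# the real resolution lives in the client's _get_universe_domain, with
+# "googleapis.com" as the library default):
+#
+#     def resolve_universe_domain(client_option, env_value):
+#         if client_option == "":
+#             raise ValueError("Universe Domain cannot be an empty string.")
+#         # an explicit client option wins, then the env var, then the default
+#         return client_option or env_value or "googleapis.com"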
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatabaseAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatabaseAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (DatabaseAdminClient, "grpc"), + (DatabaseAdminAsyncClient, "grpc_asyncio"), + (DatabaseAdminClient, "rest"), +]) +def test_database_admin_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DatabaseAdminGrpcTransport, "grpc"), + (transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DatabaseAdminRestTransport, "rest"), +]) +def test_database_admin_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DatabaseAdminClient, "grpc"), + (DatabaseAdminAsyncClient, "grpc_asyncio"), + (DatabaseAdminClient, "rest"), +]) +def test_database_admin_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +def test_database_admin_client_get_transport_class(): + transport = DatabaseAdminClient.get_transport_class() + available_transports = [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminRestTransport, + ] + assert transport in available_transports + + transport = DatabaseAdminClient.get_transport_class("grpc") + assert transport == transports.DatabaseAdminGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"), +]) +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) +def test_database_admin_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
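+    # (the transport should then be created with host=DEFAULT_MTLS_ENDPOINT
+    # even though no client certificate is configured.)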
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "true"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "false"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "true"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "false"), +]) +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) 
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_database_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
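+    # (with no certificate from either source, the client should stay on the
+    # regular endpoint and pass client_cert_source_for_mtls=None.)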
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DatabaseAdminClient, DatabaseAdminAsyncClient +]) +@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient)) +def test_database_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
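+    # (discovering an ADC default certificate should flip the endpoint to
+    # DEFAULT_MTLS_ENDPOINT and surface that certificate as the cert source.)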
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + DatabaseAdminClient, DatabaseAdminAsyncClient +]) +@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) +@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) +def test_database_admin_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE + default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"), +]) +def test_database_admin_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", None), +]) +def test_database_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
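+    # (the path should be forwarded to the transport verbatim as
+    # credentials_file, with credentials itself left as None.)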
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_database_admin_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DatabaseAdminClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_database_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
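+    # (load_credentials_from_file is expected to supply the channel
+    # credentials, so create_channel should receive those rather than the
+    # ADC default credentials.)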
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "spanner.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + scopes=None, + default_host="spanner.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabasesRequest, + dict, +]) +def test_list_databases(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_databases_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.ListDatabasesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+        client.list_databases(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == spanner_database_admin.ListDatabasesRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+        )
+
+def test_list_databases_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_databases in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc
+        request = {}
+        client.list_databases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_databases(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_databases_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_databases in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_databases] = mock_rpc
+
+        request = {}
+        await client.list_databases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_databases(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabasesRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_databases),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_databases(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabasesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabasesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_databases_async_from_dict(): + await test_list_databases_async(request_type=dict) + +def test_list_databases_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabasesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabasesResponse() + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_databases_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabasesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse()) + await client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_databases_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabasesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_databases( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_databases_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
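+ # (A request message and flattened keyword arguments are two mutually
+ # exclusive ways to describe the same call, so passing both must fail.)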
+ with pytest.raises(ValueError):
+ client.list_databases(
+ spanner_database_admin.ListDatabasesRequest(),
+ parent='parent_value',
+ )
+
+@pytest.mark.asyncio
+async def test_list_databases_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_databases),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_databases(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_databases_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_databases(
+ spanner_database_admin.ListDatabasesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_databases_pager(transport_name: str = "grpc"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_databases),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[
+ spanner_database_admin.Database(),
+ spanner_database_admin.Database(),
+ spanner_database_admin.Database(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[],
+ next_page_token='def',
+ ),
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[
+ spanner_database_admin.Database(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner_database_admin.ListDatabasesResponse(
+ databases=[
+ spanner_database_admin.Database(),
+ spanner_database_admin.Database(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_databases(request={}, retry=retry, timeout=timeout)
+
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, spanner_database_admin.Database)
+ for i in results)
+
+
+def test_list_databases_pages(transport_name: str = "grpc"):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_databases),
+ '__call__') as call:
+ # Set the response to a series of pages.
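+ # (Each element of ``side_effect`` feeds exactly one stub call, in
+ # order; the trailing RuntimeError makes any over-fetch fail loudly.)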
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + pages = list(client.list_databases(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_databases_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_databases(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_database_admin.Database) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_databases_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabasesResponse( + databases=[ + spanner_database_admin.Database(), + spanner_database_admin.Database(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_databases(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.CreateDatabaseRequest, + dict, +]) +def test_create_database(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.CreateDatabaseRequest( + parent='parent_value', + create_statement='create_statement_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.CreateDatabaseRequest( + parent='parent_value', + create_statement='create_statement_value', + ) + +def test_create_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_database in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_database] = mock_rpc + + request = {} + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.CreateDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
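+ # (The async transport awaits the stub call, so the mock returns an
+ # awaitable ``grpc_helpers_async.FakeUnaryUnaryCall`` wrapping the
+ # response instead of the bare message.)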
+ with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + +def test_create_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.CreateDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.CreateDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent='parent_value', + create_statement='create_statement_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
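+ # (Flattened keyword arguments are folded into a single request message
+ # before the transport is invoked, so the checks below read the fields
+ # back off ``args[0]``.)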
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].create_statement
+ mock_val = 'create_statement_value'
+ assert arg == mock_val
+
+
+def test_create_database_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_database(
+ spanner_database_admin.CreateDatabaseRequest(),
+ parent='parent_value',
+ create_statement='create_statement_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_database_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_database(
+ parent='parent_value',
+ create_statement='create_statement_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].create_statement
+ mock_val = 'create_statement_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_database_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_database(
+ spanner_database_admin.CreateDatabaseRequest(),
+ parent='parent_value',
+ create_statement='create_statement_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.GetDatabaseRequest,
+ dict,
+])
+def test_get_database(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner_database_admin.Database(
+ name='name_value',
+ state=spanner_database_admin.Database.State.CREATING,
+ version_retention_period='version_retention_period_value',
+ default_leader='default_leader_value',
+ database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+ enable_drop_protection=True,
+ reconciling=True,
+ )
+ response = client.get_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.GetDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.Database) + assert response.name == 'name_value' + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == 'version_retention_period_value' + assert response.default_leader == 'default_leader_value' + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.enable_drop_protection is True + assert response.reconciling is True + + +def test_get_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.GetDatabaseRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.GetDatabaseRequest( + name='name_value', + ) + +def test_get_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_database] = mock_rpc + request = {} + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_database(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_database in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_database] = mock_rpc
+
+ request = {}
+ await client.get_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_database(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseRequest):
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database(
+ name='name_value',
+ state=spanner_database_admin.Database.State.CREATING,
+ version_retention_period='version_retention_period_value',
+ default_leader='default_leader_value',
+ database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+ enable_drop_protection=True,
+ reconciling=True,
+ ))
+ response = await client.get_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner_database_admin.GetDatabaseRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
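+ # (proto-plus messages expose protobuf fields as plain attributes,
+ # which is what the assertions below rely on.)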
+ assert isinstance(response, spanner_database_admin.Database) + assert response.name == 'name_value' + assert response.state == spanner_database_admin.Database.State.CREATING + assert response.version_retention_period == 'version_retention_period_value' + assert response.default_leader == 'default_leader_value' + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.enable_drop_protection is True + assert response.reconciling is True + + +@pytest.mark.asyncio +async def test_get_database_async_from_dict(): + await test_get_database_async(request_type=dict) + +def test_get_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.GetDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = spanner_database_admin.Database() + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.GetDatabaseRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) + await client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_database( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_database_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.get_database(
+ spanner_database_admin.GetDatabaseRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_database_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_database(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_database_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_database(
+ spanner_database_admin.GetDatabaseRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.UpdateDatabaseRequest,
+ dict,
+])
+def test_update_database(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_database_admin.UpdateDatabaseRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_update_database_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner_database_admin.UpdateDatabaseRequest()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.update_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.UpdateDatabaseRequest( + ) + +def test_update_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + request = {} + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_database in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_database] = mock_rpc + + request = {} + await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.UpdateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_database_async_from_dict(): + await test_update_database_async(request_type=dict) + +def test_update_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseRequest() + + request.database.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseRequest() + + request.database.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database.name=name_value', + ) in kw['metadata'] + + +def test_update_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_database( + database=spanner_database_admin.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = spanner_database_admin.Database(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+
+def test_update_database_flattened_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_database(
+ spanner_database_admin.UpdateDatabaseRequest(),
+ database=spanner_database_admin.Database(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+@pytest.mark.asyncio
+async def test_update_database_flattened_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_database(
+ database=spanner_database_admin.Database(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = spanner_database_admin.Database(name='name_value')
+ assert arg == mock_val
+ arg = args[0].update_mask
+ mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_database_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_database(
+ spanner_database_admin.UpdateDatabaseRequest(),
+ database=spanner_database_admin.Database(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.UpdateDatabaseDdlRequest,
+ dict,
+])
+def test_update_database_ddl(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_database_ddl),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_database_ddl(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.UpdateDatabaseDdlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_database_ddl_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.UpdateDatabaseDdlRequest( + database='database_value', + operation_id='operation_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_database_ddl(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest( + database='database_value', + operation_id='operation_id_value', + ) + +def test_update_database_ddl_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database_ddl in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc + request = {} + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_database_ddl in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_database_ddl] = mock_rpc + + request = {} + await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.UpdateDatabaseDdlRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_database_ddl_async_from_dict(): + await test_update_database_ddl_async(request_type=dict) + +def test_update_database_ddl_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseDdlRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
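+ # (The client mirrors ``request.database`` into the
+ # ``x-goog-request-params`` metadata entry so the backend can route the
+ # call; the assertions below verify exactly that.)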
+ with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_database_ddl_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.UpdateDatabaseDdlRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_update_database_ddl_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_database_ddl( + database='database_value', + statements=['statements_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].statements + mock_val = ['statements_value'] + assert arg == mock_val + + +def test_update_database_ddl_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_database_ddl( + spanner_database_admin.UpdateDatabaseDdlRequest(), + database='database_value', + statements=['statements_value'], + ) + +@pytest.mark.asyncio +async def test_update_database_ddl_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_database_ddl(
+ database='database_value',
+ statements=['statements_value'],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].database
+ mock_val = 'database_value'
+ assert arg == mock_val
+ arg = args[0].statements
+ mock_val = ['statements_value']
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_database_ddl_flattened_error_async():
+ client = DatabaseAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_database_ddl(
+ spanner_database_admin.UpdateDatabaseDdlRequest(),
+ database='database_value',
+ statements=['statements_value'],
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.DropDatabaseRequest,
+ dict,
+])
+def test_drop_database(request_type, transport: str = 'grpc'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.drop_database),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.drop_database(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_database_admin.DropDatabaseRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_drop_database_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = spanner_database_admin.DropDatabaseRequest(
+ database='database_value',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.drop_database),
+ '__call__') as call:
+ call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.drop_database(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == spanner_database_admin.DropDatabaseRequest(
+            database='database_value',
+        )
+
+def test_drop_database_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.drop_database in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc
+        request = {}
+        client.drop_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.drop_database(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_drop_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.drop_database in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.drop_database] = mock_rpc
+
+        request = {}
+        await client.drop_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.drop_database(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_drop_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.DropDatabaseRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.drop_database),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.drop_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.DropDatabaseRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_drop_database_async_from_dict():
+    await test_drop_database_async(request_type=dict)
+
+def test_drop_database_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.DropDatabaseRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.drop_database),
+            '__call__') as call:
+        call.return_value = None
+        client.drop_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_drop_database_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.DropDatabaseRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.drop_database),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.drop_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+def test_drop_database_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.drop_database),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.drop_database(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+
+def test_drop_database_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.drop_database(
+            spanner_database_admin.DropDatabaseRequest(),
+            database='database_value',
+        )
+
+@pytest.mark.asyncio
+async def test_drop_database_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.drop_database),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.drop_database(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_drop_database_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.drop_database(
+            spanner_database_admin.DropDatabaseRequest(),
+            database='database_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_database_admin.GetDatabaseDdlRequest,
+    dict,
+])
+def test_get_database_ddl(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.GetDatabaseDdlResponse(
+            statements=['statements_value'],
+            proto_descriptors=b'proto_descriptors_blob',
+        )
+        response = client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.GetDatabaseDdlRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse)
+    assert response.statements == ['statements_value']
+    assert response.proto_descriptors == b'proto_descriptors_blob'
+
+
+def test_get_database_ddl_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner_database_admin.GetDatabaseDdlRequest(
+        database='database_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.get_database_ddl(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == spanner_database_admin.GetDatabaseDdlRequest(
+            database='database_value',
+        )
+
+def test_get_database_ddl_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_database_ddl in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc
+        request = {}
+        client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.get_database_ddl(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_database_ddl in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_database_ddl] = mock_rpc
+
+        request = {}
+        await client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_database_ddl(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseDdlRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse(
+            statements=['statements_value'],
+            proto_descriptors=b'proto_descriptors_blob',
+        ))
+        response = await client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.GetDatabaseDdlRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse)
+    assert response.statements == ['statements_value']
+    assert response.proto_descriptors == b'proto_descriptors_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_async_from_dict():
+    await test_get_database_ddl_async(request_type=dict)
+
+def test_get_database_ddl_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.GetDatabaseDdlRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        call.return_value = spanner_database_admin.GetDatabaseDdlResponse()
+        client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.GetDatabaseDdlRequest()
+
+    request.database = 'database_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse())
+        await client.get_database_ddl(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'database=database_value',
+    ) in kw['metadata']
+
+
+def test_get_database_ddl_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.GetDatabaseDdlResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_database_ddl(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+
+def test_get_database_ddl_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_database_ddl(
+            spanner_database_admin.GetDatabaseDdlRequest(),
+            database='database_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_database_ddl),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_database_ddl(
+            database='database_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_database_ddl_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_database_ddl(
+            spanner_database_admin.GetDatabaseDdlRequest(),
+            database='database_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.SetIamPolicyRequest,
+    dict,
+])
+def test_set_iam_policy(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+        response = client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.SetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+def test_set_iam_policy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
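+    # (Per AIP-4235, only fields annotated for auto-population receive a
+    # generated UUID4; the assertion at the end of this test verifies that
+    # the explicitly set fields reach the stub unchanged.)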
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = iam_policy_pb2.SetIamPolicyRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.set_iam_policy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
+            resource='resource_value',
+        )
+
+def test_set_iam_policy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.set_iam_policy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
+        request = {}
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
+
+        request = {}
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.SetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_from_dict():
+    await test_set_iam_policy_async(request_type=dict)
+
+def test_set_iam_policy_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        call.return_value = policy_pb2.Policy()
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
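+    # (The gapic layer encodes request routing parameters into the
+    # x-goog-request-params metadata entry, which is what kw['metadata']
+    # is inspected for below.)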
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_set_iam_policy_from_dict_foreign():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.set_iam_policy(request={
+            'resource': 'resource_value',
+            'policy': policy_pb2.Policy(version=774),
+            'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']),
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_set_iam_policy_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.GetIamPolicyRequest,
+    dict,
+])
+def test_get_iam_policy(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+        response = client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+def test_get_iam_policy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = iam_policy_pb2.GetIamPolicyRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.get_iam_policy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.GetIamPolicyRequest(
+            resource='resource_value',
+        )
+
+def test_get_iam_policy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_iam_policy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc
+        request = {}
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
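+        # (_wrapped_methods maps each transport stub to its retry/timeout
+        # wrapped callable, so swapping in mock_rpc above lets the test count
+        # invocations without rebuilding the wrapper.)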
+        assert mock_rpc.call_count == 1
+
+        client.get_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
+
+        request = {}
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+    assert response.version == 774
+    assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+    await test_get_iam_policy_async(request_type=dict)
+
+def test_get_iam_policy_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = policy_pb2.Policy()
+        client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.GetIamPolicyRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        await client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_get_iam_policy_from_dict_foreign():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        response = client.get_iam_policy(request={
+            'resource': 'resource_value',
+            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
+            }
+        )
+        call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_get_iam_policy_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_iam_policy(
+            iam_policy_pb2.GetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.TestIamPermissionsRequest,
+    dict,
+])
+def test_test_iam_permissions(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        )
+        response = client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+def test_test_iam_permissions_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = iam_policy_pb2.TestIamPermissionsRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.test_iam_permissions(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.TestIamPermissionsRequest(
+            resource='resource_value',
+        )
+
+def test_test_iam_permissions_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.test_iam_permissions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc
+        request = {}
+        client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
+
+        request = {}
+        await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.test_iam_permissions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        ))
+        response = await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.TestIamPermissionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+    assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+    await test_test_iam_permissions_async(request_type=dict)
+
+def test_test_iam_permissions_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.TestIamPermissionsRequest()
+
+    request.resource = 'resource_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
+        await client.test_iam_permissions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'resource=resource_value',
+    ) in kw['metadata']
+
+def test_test_iam_permissions_from_dict_foreign():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        response = client.test_iam_permissions(request={
+            'resource': 'resource_value',
+            'permissions': ['permissions_value'],
+            }
+        )
+        call.assert_called()
+
+
+def test_test_iam_permissions_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.test_iam_permissions(
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+        arg = args[0].permissions
+        mock_val = ['permissions_value']
+        assert arg == mock_val
+
+
+def test_test_iam_permissions_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.test_iam_permissions(
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+        arg = args[0].permissions
+        mock_val = ['permissions_value']
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.test_iam_permissions(
+            iam_policy_pb2.TestIamPermissionsRequest(),
+            resource='resource_value',
+            permissions=['permissions_value'],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    gsad_backup.CreateBackupRequest,
+    dict,
+])
+def test_create_backup(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
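+        # (CreateBackup is a long-running operation: the stub returns a raw
+        # operations_pb2.Operation, which the client wraps in an api_core
+        # operation future, hence the future.Future assertion below.)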
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = gsad_backup.CreateBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_create_backup_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = gsad_backup.CreateBackupRequest(
+        parent='parent_value',
+        backup_id='backup_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.create_backup(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == gsad_backup.CreateBackupRequest(
+            parent='parent_value',
+            backup_id='backup_id_value',
+        )
+
+def test_create_backup_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_backup in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc
+        request = {}
+        client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.create_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_backup in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_backup] = mock_rpc
+
+        request = {}
+        await client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        await client.create_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_create_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.CreateBackupRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        response = await client.create_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = gsad_backup.CreateBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_create_backup_async_from_dict():
+    await test_create_backup_async(request_type=dict)
+
+def test_create_backup_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = gsad_backup.CreateBackupRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.CreateBackupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup( + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].backup + mock_val = gsad_backup.Backup(database='database_value') + assert arg == mock_val + arg = args[0].backup_id + mock_val = 'backup_id_value' + assert arg == mock_val + + +def test_create_backup_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup( + gsad_backup.CreateBackupRequest(), + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + +@pytest.mark.asyncio +async def test_create_backup_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. 
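+        # The async client awaits the transport call, so a plain Operation
+        # cannot be returned directly; it is wrapped in
+        # grpc_helpers_async.FakeUnaryUnaryCall, which (to a first
+        # approximation) just resolves to the wrapped message when awaited.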
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_backup(
+            parent='parent_value',
+            backup=gsad_backup.Backup(database='database_value'),
+            backup_id='backup_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].backup
+        mock_val = gsad_backup.Backup(database='database_value')
+        assert arg == mock_val
+        arg = args[0].backup_id
+        mock_val = 'backup_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_backup_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_backup(
+            gsad_backup.CreateBackupRequest(),
+            parent='parent_value',
+            backup=gsad_backup.Backup(database='database_value'),
+            backup_id='backup_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    backup.CopyBackupRequest,
+    dict,
+])
+def test_copy_backup(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.copy_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.copy_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = backup.CopyBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_copy_backup_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4,
+    # since we want to check that UUID4 fields are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = backup.CopyBackupRequest(
+        parent='parent_value',
+        backup_id='backup_id_value',
+        source_backup='source_backup_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.copy_backup),
+            '__call__') as call:
+        call.return_value.name = "foo" # compute clients expect operation_request.operation to be a string.
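+        # (call.return_value is a MagicMock here; giving its .name attribute a
+        # real string means any code path that reads the operation's name sees
+        # a string rather than another MagicMock, per the note above.)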
+ client.copy_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.CopyBackupRequest( + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + ) + +def test_copy_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc + request = {} + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_copy_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.copy_backup in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.copy_backup] = mock_rpc + + request = {} + await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_copy_backup_async(transport: str = 'grpc_asyncio', request_type=backup.CopyBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup.CopyBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_copy_backup_async_from_dict(): + await test_copy_backup_async(request_type=dict) + +def test_copy_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.CopyBackupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_copy_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.CopyBackupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_copy_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.copy_backup( + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
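+        # The flattened kwargs are merged into a single CopyBackupRequest, so
+        # each keyword shows up as the matching request field below. Note that
+        # expire_time is compared via TimestampRule().to_proto: proto-plus
+        # surfaces Timestamp fields as datetime objects, while the keyword was
+        # passed as a raw timestamp_pb2.Timestamp.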
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].backup_id
+        mock_val = 'backup_id_value'
+        assert arg == mock_val
+        arg = args[0].source_backup
+        mock_val = 'source_backup_value'
+        assert arg == mock_val
+        assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
+
+
+def test_copy_backup_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.copy_backup(
+            backup.CopyBackupRequest(),
+            parent='parent_value',
+            backup_id='backup_id_value',
+            source_backup='source_backup_value',
+            expire_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+@pytest.mark.asyncio
+async def test_copy_backup_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.copy_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.copy_backup(
+            parent='parent_value',
+            backup_id='backup_id_value',
+            source_backup='source_backup_value',
+            expire_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].backup_id
+        mock_val = 'backup_id_value'
+        assert arg == mock_val
+        arg = args[0].source_backup
+        mock_val = 'source_backup_value'
+        assert arg == mock_val
+        assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
+
+@pytest.mark.asyncio
+async def test_copy_backup_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.copy_backup(
+            backup.CopyBackupRequest(),
+            parent='parent_value',
+            backup_id='backup_id_value',
+            source_backup='source_backup_value',
+            expire_time=timestamp_pb2.Timestamp(seconds=751),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    backup.GetBackupRequest,
+    dict,
+])
+def test_get_backup(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.GetBackupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.GetBackupRequest( + name='name_value', + ) + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. 
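+        # (Dispatch goes through the cache, i.e. roughly
+        # client._transport._wrapped_methods[client._transport.get_backup](request),
+        # so swapping the cached entry above is enough to intercept the call.)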
+        assert mock_rpc.call_count == 1
+
+        client.get_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_backup in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_backup] = mock_rpc
+
+        request = {}
+        await client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=backup.GetBackupRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup(
+            database='database_value',
+            name='name_value',
+            size_bytes=1089,
+            freeable_size_bytes=2006,
+            exclusive_size_bytes=2168,
+            state=backup.Backup.State.CREATING,
+            referencing_databases=['referencing_databases_value'],
+            database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+            referencing_backups=['referencing_backups_value'],
+            backup_schedules=['backup_schedules_value'],
+            incremental_backup_chain_id='incremental_backup_chain_id_value',
+        ))
+        response = await client.get_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = backup.GetBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.asyncio +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) + +def test_get_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = backup.Backup() + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.GetBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) + await client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_backup_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_backup(
+            backup.GetBackupRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_backup(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_backup_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_backup(
+            backup.GetBackupRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    gsad_backup.UpdateBackupRequest,
+    dict,
+])
+def test_update_backup(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gsad_backup.Backup(
+            database='database_value',
+            name='name_value',
+            size_bytes=1089,
+            freeable_size_bytes=2006,
+            exclusive_size_bytes=2168,
+            state=gsad_backup.Backup.State.CREATING,
+            referencing_databases=['referencing_databases_value'],
+            database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+            referencing_backups=['referencing_backups_value'],
+            backup_schedules=['backup_schedules_value'],
+            incremental_backup_chain_id='incremental_backup_chain_id_value',
+        )
+        response = client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = gsad_backup.UpdateBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +def test_update_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup.UpdateBackupRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup.UpdateBackupRequest( + ) + +def test_update_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.update_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_backup in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_backup] = mock_rpc
+
+        request = {}
+        await client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_backup(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.UpdateBackupRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup(
+            database='database_value',
+            name='name_value',
+            size_bytes=1089,
+            freeable_size_bytes=2006,
+            exclusive_size_bytes=2168,
+            state=gsad_backup.Backup.State.CREATING,
+            referencing_databases=['referencing_databases_value'],
+            database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
+            referencing_backups=['referencing_backups_value'],
+            backup_schedules=['backup_schedules_value'],
+            incremental_backup_chain_id='incremental_backup_chain_id_value',
+        ))
+        response = await client.update_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = gsad_backup.UpdateBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.asyncio +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) + +def test_update_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.UpdateBackupRequest() + + request.backup.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = gsad_backup.Backup() + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup.UpdateBackupRequest() + + request.backup.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) + await client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup.name=name_value', + ) in kw['metadata'] + + +def test_update_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup.Backup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup( + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
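+        # update_mask is a google.protobuf.FieldMask whose paths name the
+        # Backup fields the server should overwrite; a realistic mask might be
+        # field_mask_pb2.FieldMask(paths=['expire_time']) (illustrative only),
+        # whereas the 'paths_value' placeholder below just checks pass-through.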
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup
+        mock_val = gsad_backup.Backup(database='database_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_backup_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_backup(
+            gsad_backup.UpdateBackupRequest(),
+            backup=gsad_backup.Backup(database='database_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_backup(
+            backup=gsad_backup.Backup(database='database_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].backup
+        mock_val = gsad_backup.Backup(database='database_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_backup_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_backup(
+            gsad_backup.UpdateBackupRequest(),
+            backup=gsad_backup.Backup(database='database_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    backup.DeleteBackupRequest,
+    dict,
+])
+def test_delete_backup(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_backup(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = backup.DeleteBackupRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert response is None + + +def test_delete_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.DeleteBackupRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.DeleteBackupRequest( + name='name_value', + ) + +def test_delete_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_backup in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_backup] = mock_rpc + + request = {} + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=backup.DeleteBackupRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup.DeleteBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) + +def test_delete_backup_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = None + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_backup_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup.DeleteBackupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_backup_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_backup(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_backup_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_backup(
+            backup.DeleteBackupRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_backup(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_backup_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_backup(
+            backup.DeleteBackupRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    backup.ListBackupsRequest,
+    dict,
+])
+def test_list_backups(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backups),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = backup.ListBackupsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_backups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = backup.ListBackupsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBackupsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_backups_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.ListBackupsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_backups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_backups_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_backups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_backups in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_backups] = mock_rpc + + request = {} + await client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.list_backups(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupsRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backups),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_backups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = backup.ListBackupsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBackupsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_backups_async_from_dict():
+    await test_list_backups_async(request_type=dict)
+
+def test_list_backups_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup.ListBackupsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backups),
+            '__call__') as call:
+        call.return_value = backup.ListBackupsResponse()
+        client.list_backups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_backups_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup.ListBackupsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backups),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse())
+        await client.list_backups(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_backups_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backups),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = backup.ListBackupsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_backups(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_backups_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_backups(
+            backup.ListBackupsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_backups_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backups),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_backups(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_backups_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_backups(
+            backup.ListBackupsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_backups_pager(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backups),
+            '__call__') as call:
+        # Set the response to a series of pages.
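+        # Each side_effect entry is consumed by one page fetch; the trailing
+        # RuntimeError is a sentinel that fails the test if the pager requests
+        # more pages than the four defined below.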
+ call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_backups(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backup.Backup) + for i in results) +def test_list_backups_pages(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backups(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backups_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backups(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup.Backup) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_backups_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + backup.Backup(), + ], + next_page_token='abc', + ), + backup.ListBackupsResponse( + backups=[], + next_page_token='def', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + ], + next_page_token='ghi', + ), + backup.ListBackupsResponse( + backups=[ + backup.Backup(), + backup.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.RestoreDatabaseRequest, + dict, +]) +def test_restore_database(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.RestoreDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_restore_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.RestoreDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
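+        # Send the request; the equality assertion below confirms the explicit
+        # string fields reach the stub unchanged.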
+ client.restore_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.RestoreDatabaseRequest( + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + +def test_restore_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.restore_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc + request = {} + client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_restore_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.restore_database in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.restore_database] = mock_rpc + + request = {} + await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.restore_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.RestoreDatabaseRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.RestoreDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_restore_database_async_from_dict(): + await test_restore_database_async(request_type=dict) + +def test_restore_database_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.RestoreDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_restore_database_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.RestoreDatabaseRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.restore_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_restore_database_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
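+        # (Flattened keyword arguments are folded into a RestoreDatabaseRequest
+        # by the client before the transport is invoked.)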
+        client.restore_database(
+            parent='parent_value',
+            database_id='database_id_value',
+            backup='backup_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].database_id
+        mock_val = 'database_id_value'
+        assert arg == mock_val
+        assert args[0].backup == 'backup_value'
+
+
+def test_restore_database_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.restore_database(
+            spanner_database_admin.RestoreDatabaseRequest(),
+            parent='parent_value',
+            database_id='database_id_value',
+            backup='backup_value',
+        )
+
+@pytest.mark.asyncio
+async def test_restore_database_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.restore_database),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.restore_database(
+            parent='parent_value',
+            database_id='database_id_value',
+            backup='backup_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].database_id
+        mock_val = 'database_id_value'
+        assert arg == mock_val
+        assert args[0].backup == 'backup_value'
+
+@pytest.mark.asyncio
+async def test_restore_database_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.restore_database(
+            spanner_database_admin.RestoreDatabaseRequest(),
+            parent='parent_value',
+            database_id='database_id_value',
+            backup='backup_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_database_admin.ListDatabaseOperationsRequest,
+    dict,
+])
+def test_list_database_operations(request_type, transport: str = 'grpc'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.ListDatabaseOperationsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_database_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabaseOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_database_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.ListDatabaseOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_database_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_database_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_database_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc + request = {} + client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_database_operations(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_database_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_database_operations in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_database_operations] = mock_rpc
+
+        request = {}
+        await client.list_database_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_database_operations(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_database_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseOperationsRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_database_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.ListDatabaseOperationsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDatabaseOperationsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_database_operations_async_from_dict():
+    await test_list_database_operations_async(request_type=dict)
+
+def test_list_database_operations_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.ListDatabaseOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
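+    # (Patching __call__ on the callable's type intercepts the outgoing RPC
+    # without opening a real gRPC channel.)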
+ with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_database_operations_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.ListDatabaseOperationsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) + await client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_database_operations_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_database_operations( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_database_operations_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_database_operations( + spanner_database_admin.ListDatabaseOperationsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_database_operations_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_database_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_database_operations_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_database_operations(
+            spanner_database_admin.ListDatabaseOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_database_operations_pager(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_database_operations(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                   for i in results)
+def test_list_database_operations_pages(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_database_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_database_operations_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_database_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_database_operations_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
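+        # Four pages follow; the last omits next_page_token, which the zip
+        # loop below observes as the empty-string sentinel ending iteration.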
+ call.side_effect = ( + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_database_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + backup.ListBackupOperationsRequest, + dict, +]) +def test_list_backup_operations(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup.ListBackupOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_backup_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup.ListBackupOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_backup_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup.ListBackupOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_backup_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc + request = {} + client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_backup_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_backup_operations in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_backup_operations] = mock_rpc + + request = {} + await client.list_backup_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_backup_operations_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupOperationsRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_backup_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = backup.ListBackupOperationsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBackupOperationsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_backup_operations_async_from_dict():
+    await test_list_backup_operations_async(request_type=dict)
+
+def test_list_backup_operations_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup.ListBackupOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_operations),
+            '__call__') as call:
+        call.return_value = backup.ListBackupOperationsResponse()
+        client.list_backup_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_backup_operations_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup.ListBackupOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_operations),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse())
+        await client.list_backup_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_backup_operations_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = backup.ListBackupOperationsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_backup_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
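+        # (Exactly one underlying call is expected; the flattened 'parent'
+        # argument should surface as a field on the request proto.)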
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_backup_operations_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_backup_operations(
+            backup.ListBackupOperationsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_backup_operations_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_backup_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_backup_operations_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_backup_operations(
+            backup.ListBackupOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_backup_operations_pager(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_backup_operations(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in results) +def test_list_backup_operations_pages(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backup_operations_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_operations_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + backup.ListBackupOperationsResponse( + operations=[], + next_page_token='def', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + backup.ListBackupOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabaseRolesRequest, + dict, +]) +def test_list_database_roles(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseRolesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.ListDatabaseRolesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
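+    # (The pager wraps the first response and lazily fetches later pages
+    # through the same transport method.)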
+ assert isinstance(response, pagers.ListDatabaseRolesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_database_roles_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.ListDatabaseRolesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_database_roles(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.ListDatabaseRolesRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_database_roles_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_database_roles in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc + request = {} + client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_database_roles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_database_roles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_database_roles in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_database_roles] = mock_rpc + + request = {} + await client.list_database_roles(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        await client.list_database_roles(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_database_roles_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseRolesRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_database_roles(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.ListDatabaseRolesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDatabaseRolesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_database_roles_async_from_dict():
+    await test_list_database_roles_async(request_type=dict)
+
+def test_list_database_roles_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.ListDatabaseRolesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__') as call:
+        call.return_value = spanner_database_admin.ListDatabaseRolesResponse()
+        client.list_database_roles(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_database_roles_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_database_admin.ListDatabaseRolesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_database_roles),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse())
+        await client.list_database_roles(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_database_roles_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.ListDatabaseRolesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_database_roles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_database_roles_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_database_roles( + spanner_database_admin.ListDatabaseRolesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_database_roles_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_database_roles( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_database_roles_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_database_roles( + spanner_database_admin.ListDatabaseRolesRequest(), + parent='parent_value', + ) + + +def test_list_database_roles_pager(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Set the response to a series of pages.
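+ # An iterable side_effect makes the stub return the next response on each + # successive call; the trailing RuntimeError fails fast if the pager tries + # to fetch beyond the final page.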
+ call.side_effect = ( + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_database_roles(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, spanner_database_admin.DatabaseRole) + for i in results) +def test_list_database_roles_pages(transport_name: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + ), + RuntimeError, + ) + pages = list(client.list_database_roles(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_database_roles_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
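+ # With new_callable=mock.AsyncMock, each side_effect value is delivered as + # the result of awaiting the stub, mirroring a real async unary-unary call.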
+ call.side_effect = ( + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_database_roles(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_database_admin.DatabaseRole) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_database_roles_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + next_page_token='abc', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[], + next_page_token='def', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + ], + next_page_token='ghi', + ), + spanner_database_admin.ListDatabaseRolesResponse( + database_roles=[ + spanner_database_admin.DatabaseRole(), + spanner_database_admin.DatabaseRole(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_database_roles(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.AddSplitPointsRequest, + dict, +]) +def test_add_split_points(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.AddSplitPointsResponse( + ) + response = client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.AddSplitPointsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.AddSplitPointsResponse) + + +def test_add_split_points_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.AddSplitPointsRequest( + database='database_value', + initiator='initiator_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.add_split_points(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.AddSplitPointsRequest( + database='database_value', + initiator='initiator_value', + ) + +def test_add_split_points_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_split_points in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc + request = {} + client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called. 
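+ # _wrapped_methods is keyed by the bound transport method, so the mock + # installed above is exactly what client.add_split_points() dispatches to.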
+ assert mock_rpc.call_count == 1 + + client.add_split_points(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_add_split_points_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.add_split_points in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.add_split_points] = mock_rpc + + request = {} + await client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.add_split_points(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_add_split_points_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.AddSplitPointsRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse( + )) + response = await client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.AddSplitPointsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.AddSplitPointsResponse) + + +@pytest.mark.asyncio +async def test_add_split_points_async_from_dict(): + await test_add_split_points_async(request_type=dict) + +def test_add_split_points_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.AddSplitPointsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + call.return_value = spanner_database_admin.AddSplitPointsResponse() + client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_add_split_points_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_database_admin.AddSplitPointsRequest() + + request.database = 'database_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse()) + await client.add_split_points(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'database=database_value', + ) in kw['metadata'] + + +def test_add_split_points_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.AddSplitPointsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.add_split_points( + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].split_points + mock_val = [spanner_database_admin.SplitPoints(table='table_value')] + assert arg == mock_val + + +def test_add_split_points_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_split_points( + spanner_database_admin.AddSplitPointsRequest(), + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + +@pytest.mark.asyncio +async def test_add_split_points_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method.
+ response = await client.add_split_points( + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].split_points + mock_val = [spanner_database_admin.SplitPoints(table='table_value')] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_add_split_points_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.add_split_points( + spanner_database_admin.AddSplitPointsRequest(), + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.CreateBackupScheduleRequest, + dict, +]) +def test_create_backup_schedule(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + response = client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.CreateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup_schedule.CreateBackupScheduleRequest( + parent='parent_value', + backup_schedule_id='backup_schedule_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
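+ # (CreateBackupSchedule returns a plain BackupSchedule rather than an + # operation; the .name assignment above simply follows the shared template + # described in the inline comment.)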
+ client.create_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.CreateBackupScheduleRequest( + parent='parent_value', + backup_schedule_id='backup_schedule_id_value', + ) + +def test_create_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc + request = {} + client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_backup_schedule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_backup_schedule] = mock_rpc + + request = {} + await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.CreateBackupScheduleRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( + name='name_value', + )) + response = await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.CreateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_backup_schedule_async_from_dict(): + await test_create_backup_schedule_async(request_type=dict) + +def test_create_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup_schedule.CreateBackupScheduleRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_backup_schedule_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup_schedule.CreateBackupScheduleRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule()) + await client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_backup_schedule_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_backup_schedule( + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].backup_schedule + mock_val = gsad_backup_schedule.BackupSchedule(name='name_value') + assert arg == mock_val + arg = args[0].backup_schedule_id + mock_val = 'backup_schedule_id_value' + assert arg == mock_val + + +def test_create_backup_schedule_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_schedule( + gsad_backup_schedule.CreateBackupScheduleRequest(), + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + +@pytest.mark.asyncio +async def test_create_backup_schedule_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_backup_schedule( + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].backup_schedule + mock_val = gsad_backup_schedule.BackupSchedule(name='name_value') + assert arg == mock_val + arg = args[0].backup_schedule_id + mock_val = 'backup_schedule_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_backup_schedule_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_backup_schedule( + gsad_backup_schedule.CreateBackupScheduleRequest(), + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + backup_schedule.GetBackupScheduleRequest, + dict, +]) +def test_get_backup_schedule(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call.
+ call.return_value = backup_schedule.BackupSchedule( + name='name_value', + ) + response = client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.GetBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.GetBackupScheduleRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.GetBackupScheduleRequest( + name='name_value', + ) + +def test_get_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc + request = {} + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_backup_schedule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_backup_schedule] = mock_rpc + + request = {} + await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.GetBackupScheduleRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule( + name='name_value', + )) + response = await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup_schedule.GetBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_backup_schedule_async_from_dict(): + await test_get_backup_schedule_async(request_type=dict) + +def test_get_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.GetBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value = backup_schedule.BackupSchedule() + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_backup_schedule_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.GetBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule()) + await client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_backup_schedule_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup_schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_schedule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_backup_schedule_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_backup_schedule_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_schedule( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values.
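+ # (The async variants assert only that mock_calls is non-empty; the sync + # variants above check for an exact count of 1.)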
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_backup_schedule_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.UpdateBackupScheduleRequest, + dict, +]) +def test_update_backup_schedule(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + response = client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gsad_backup_schedule.UpdateBackupScheduleRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gsad_backup_schedule.UpdateBackupScheduleRequest( + ) + +def test_update_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc + request = {} + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_backup_schedule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_backup_schedule] = mock_rpc + + request = {} + await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. 
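+ # grpc_helpers_async.FakeUnaryUnaryCall stands in for a real grpc.aio call + # object: awaiting it yields the wrapped response message.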
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( + name='name_value', + )) + response = await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gsad_backup_schedule.BackupSchedule) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_update_backup_schedule_async_from_dict(): + await test_update_backup_schedule_async(request_type=dict) + +def test_update_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + + request.backup_schedule.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup_schedule.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_backup_schedule_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gsad_backup_schedule.UpdateBackupScheduleRequest() + + request.backup_schedule.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule()) + await client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'backup_schedule.name=name_value', + ) in kw['metadata'] + + +def test_update_backup_schedule_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gsad_backup_schedule.BackupSchedule() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_backup_schedule( + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].backup_schedule + mock_val = gsad_backup_schedule.BackupSchedule(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_backup_schedule_flattened_error(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_schedule( + gsad_backup_schedule.UpdateBackupScheduleRequest(), + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_backup_schedule_flattened_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_backup_schedule( + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].backup_schedule + mock_val = gsad_backup_schedule.BackupSchedule(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_backup_schedule_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_backup_schedule( + gsad_backup_schedule.UpdateBackupScheduleRequest(), + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + backup_schedule.DeleteBackupScheduleRequest, + dict, +]) +def test_delete_backup_schedule(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.DeleteBackupScheduleRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_backup_schedule(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.DeleteBackupScheduleRequest( + name='name_value', + ) + +def test_delete_backup_schedule_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc + request = {} + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_backup_schedule in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_backup_schedule] = mock_rpc + + request = {} + await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.DeleteBackupScheduleRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backup_schedule.DeleteBackupScheduleRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_backup_schedule_async_from_dict(): + await test_delete_backup_schedule_async(request_type=dict) + +def test_delete_backup_schedule_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backup_schedule.DeleteBackupScheduleRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value = None + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.DeleteBackupScheduleRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup_schedule),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_backup_schedule(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_backup_schedule_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_backup_schedule(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_backup_schedule_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_backup_schedule(
+            backup_schedule.DeleteBackupScheduleRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_backup_schedule),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_backup_schedule(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_backup_schedule_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_backup_schedule( + backup_schedule.DeleteBackupScheduleRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + backup_schedule.ListBackupSchedulesRequest, + dict, +]) +def test_list_backup_schedules(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = backup_schedule.ListBackupSchedulesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backup_schedule.ListBackupSchedulesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupSchedulesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backup_schedule.ListBackupSchedulesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_backup_schedules(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backup_schedule.ListBackupSchedulesRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_backup_schedules_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_schedules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc + request = {} + client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_backup_schedules(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = DatabaseAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_backup_schedules in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_backup_schedules] = mock_rpc
+
+        request = {}
+        await client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_backup_schedules(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.ListBackupSchedulesRequest):
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = backup_schedule.ListBackupSchedulesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBackupSchedulesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_async_from_dict():
+    await test_list_backup_schedules_async(request_type=dict)
+
+def test_list_backup_schedules_field_headers():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.ListBackupSchedulesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
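+    # (Each RPC on the transport is exposed as a callable stub object;
+    # patching '__call__' on its type intercepts the invocation before it
+    # ever reaches the network.)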
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        call.return_value = backup_schedule.ListBackupSchedulesResponse()
+        client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_field_headers_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = backup_schedule.ListBackupSchedulesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
+        await client.list_backup_schedules(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_backup_schedules_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = backup_schedule.ListBackupSchedulesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_backup_schedules(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_backup_schedules_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_backup_schedules(
+            backup_schedule.ListBackupSchedulesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_backup_schedules(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_backup_schedules_flattened_error_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_backup_schedules(
+            backup_schedule.ListBackupSchedulesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_backup_schedules_pager(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                ],
+                next_page_token='abc',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[],
+                next_page_token='def',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                ],
+                next_page_token='ghi',
+            ),
+            backup_schedule.ListBackupSchedulesResponse(
+                backup_schedules=[
+                    backup_schedule.BackupSchedule(),
+                    backup_schedule.BackupSchedule(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_backup_schedules(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, backup_schedule.BackupSchedule)
+                   for i in results)
+
+
+def test_list_backup_schedules_pages(transport_name: str = "grpc"):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_backup_schedules),
+            '__call__') as call:
+        # Set the response to a series of pages.
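+        # (side_effect yields one response per call, in order; the trailing
+        # RuntimeError is a sentinel so that fetching more pages than were
+        # prepared fails the test loudly instead of looping.)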
+ call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token='abc', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token='def', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token='ghi', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_schedules(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_pager(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token='abc', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token='def', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token='ghi', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_schedules(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backup_schedule.BackupSchedule) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_schedules_async_pages(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + next_page_token='abc', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[], + next_page_token='def', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + ], + next_page_token='ghi', + ), + backup_schedule.ListBackupSchedulesResponse( + backup_schedules=[ + backup_schedule.BackupSchedule(), + backup_schedule.BackupSchedule(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_schedules(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.InternalUpdateGraphOperationRequest, + dict, +]) +def test_internal_update_graph_operation(request_type, transport: str = 'grpc'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.internal_update_graph_operation), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse( + ) + response = client.internal_update_graph_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_database_admin.InternalUpdateGraphOperationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse) + + +def test_internal_update_graph_operation_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_database_admin.InternalUpdateGraphOperationRequest( + database='database_value', + operation_id='operation_id_value', + vm_identity_token='vm_identity_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.internal_update_graph_operation), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
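+        # (Assigning a plain string to the mock's `name` presumably keeps this
+        # shared generated pattern safe for clients whose responses expose a
+        # string `name`; for this RPC the value itself is never checked.)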
+ client.internal_update_graph_operation(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_database_admin.InternalUpdateGraphOperationRequest( + database='database_value', + operation_id='operation_id_value', + vm_identity_token='vm_identity_token_value', + ) + +def test_internal_update_graph_operation_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.internal_update_graph_operation in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.internal_update_graph_operation] = mock_rpc + request = {} + client.internal_update_graph_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.internal_update_graph_operation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_internal_update_graph_operation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.internal_update_graph_operation in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.internal_update_graph_operation] = mock_rpc + + request = {} + await client.internal_update_graph_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.internal_update_graph_operation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_internal_update_graph_operation_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.InternalUpdateGraphOperationRequest): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
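+    # (FakeUnaryUnaryCall wraps the canned response in an awaitable,
+    # call-like object, so the async client can await it exactly as it
+    # would a real gRPC unary-unary call.)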
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse())
+        response = await client.internal_update_graph_operation(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_database_admin.InternalUpdateGraphOperationRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse)
+
+
+@pytest.mark.asyncio
+async def test_internal_update_graph_operation_async_from_dict():
+    await test_internal_update_graph_operation_async(request_type=dict)
+
+
+def test_internal_update_graph_operation_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.internal_update_graph_operation(
+            database='database_value',
+            operation_id='operation_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].database
+        mock_val = 'database_value'
+        assert arg == mock_val
+        arg = args[0].operation_id
+        mock_val = 'operation_id_value'
+        assert arg == mock_val
+
+
+def test_internal_update_graph_operation_flattened_error():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.internal_update_graph_operation(
+            spanner_database_admin.InternalUpdateGraphOperationRequest(),
+            database='database_value',
+            operation_id='operation_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_internal_update_graph_operation_flattened_async():
+    client = DatabaseAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.internal_update_graph_operation),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.internal_update_graph_operation(
+            database='database_value',
+            operation_id='operation_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].database + mock_val = 'database_value' + assert arg == mock_val + arg = args[0].operation_id + mock_val = 'operation_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_internal_update_graph_operation_flattened_error_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.internal_update_graph_operation( + spanner_database_admin.InternalUpdateGraphOperationRequest(), + database='database_value', + operation_id='operation_id_value', + ) + + +def test_list_databases_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_databases in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc + + request = {} + client.list_databases(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_databases(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_databases_rest_required_fields(request_type=spanner_database_admin.ListDatabasesRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. 
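+    # (Session is the requests session used by the REST transport; patching
+    # its `request` method keeps the test offline. transcode is also mocked
+    # below because the real implementation matches field values against the
+    # http_options URI template and would reject these placeholder values.)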
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_databases(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_databases_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_databases._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_databases_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1]) + + +def test_list_databases_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_databases(
+            spanner_database_admin.ListDatabasesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_databases_rest_pager(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_database_admin.ListDatabasesResponse(
+                databases=[
+                    spanner_database_admin.Database(),
+                    spanner_database_admin.Database(),
+                    spanner_database_admin.Database(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_database_admin.ListDatabasesResponse(
+                databases=[],
+                next_page_token='def',
+            ),
+            spanner_database_admin.ListDatabasesResponse(
+                databases=[
+                    spanner_database_admin.Database(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_database_admin.ListDatabasesResponse(
+                databases=[
+                    spanner_database_admin.Database(),
+                    spanner_database_admin.Database(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_database_admin.ListDatabasesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for _ in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        pager = client.list_databases(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_database_admin.Database)
+                   for i in results)
+
+        pages = list(client.list_databases(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_create_database_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_database in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_database] = mock_rpc
+
+        request = {}
+        client.create_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
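+        # (create_database is a long-running operation; its first call may
+        # also lazily wrap the operations-client methods, which is why
+        # wrapper_fn is reset again below before asserting that no further
+        # wrapping happens.)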
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_database_rest_required_fields(request_type=spanner_database_admin.CreateDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["create_statement"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["createStatement"] = 'create_statement_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "createStatement" in jsonified_request + assert jsonified_request["createStatement"] == 'create_statement_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
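+            # (The stubbed transcode result mimics the dict that
+            # path_template.transcode returns for an http rule: a target
+            # 'uri', the HTTP 'method', the 'query_params' message and, for
+            # rules that carry a body, a 'body' message.)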
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "createStatement", ))) + + +def test_create_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + create_statement='create_statement_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1]) + + +def test_create_database_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_database( + spanner_database_admin.CreateDatabaseRequest(), + parent='parent_value', + create_statement='create_statement_value', + ) + + +def test_get_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_database] = mock_rpc + + request = {} + client.get_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_database_rest_required_fields(request_type=spanner_database_admin.GetDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.Database() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.Database() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*}" % client.transport._host, args[1]) + + +def test_get_database_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_database( + spanner_database_admin.GetDatabaseRequest(), + name='name_value', + ) + + +def test_update_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database] = mock_rpc + + request = {} + client.update_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_database_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("database", "updateMask", ))) + + +def test_update_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + database=spanner_database_admin.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database.name=projects/*/instances/*/databases/*}" % client.transport._host, args[1]) + + +def test_update_database_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_database( + spanner_database_admin.UpdateDatabaseRequest(), + database=spanner_database_admin.Database(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_database_ddl_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_database_ddl in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc + + request = {} + client.update_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_database_ddl_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseDdlRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request_init["statements"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + jsonified_request["statements"] = 'statements_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + assert "statements" in jsonified_request + assert jsonified_request["statements"] == 'statements_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_database_ddl(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_database_ddl_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_database_ddl._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", "statements", ))) + + +def test_update_database_ddl_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + statements=['statements_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_database_ddl(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1]) + + +def test_update_database_ddl_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
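+ # The path_template.validate() assertions in the *_flattened tests above use
+ # the real google.api_core helper: it reports whether a concrete URL matches
+ # the wildcards in an http-rule template, e.g. (illustrative values):
+ #
+ #   from google.api_core import path_template
+ #   path_template.validate(
+ #       'https://example.com/v1/{database=projects/*/instances/*/databases/*}/ddl',
+ #       'https://example.com/v1/projects/p/instances/i/databases/d/ddl')  # True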
+ with pytest.raises(ValueError): + client.update_database_ddl( + spanner_database_admin.UpdateDatabaseDdlRequest(), + database='database_value', + statements=['statements_value'], + ) + + +def test_drop_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.drop_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc + + request = {} + client.drop_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.drop_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_drop_database_rest_required_fields(request_type=spanner_database_admin.DropDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
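+ # Because the faked transcode result uses the bare 'v1/sample_method' uri with
+ # no path variables, every request field ends up in query_params; the
+ # expected_params assertion that follows can therefore pin down just the one
+ # system parameter the REST transport always appends,
+ # ('$alt', 'json;enum-encoding=int'), i.e. JSON responses with enums encoded
+ # as ints.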
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.drop_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_drop_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.drop_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", ))) + + +def test_drop_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.drop_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}" % client.transport._host, args[1]) + + +def test_drop_database_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.drop_database( + spanner_database_admin.DropDatabaseRequest(), + database='database_value', + ) + + +def test_get_database_ddl_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_database_ddl in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
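+ # _wrapped_methods is the per-transport dict that _prep_wrapped_messages()
+ # fills once at client construction: keys are the bound transport methods and
+ # values are the gapic_v1.method.wrap_method() callables carrying the default
+ # retry/timeout/metadata settings. Swapping a mock in below lets the test
+ # count invocations without re-triggering the wrapping machinery.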
+ client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc + + request = {} + client.get_database_ddl(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_database_ddl(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_database_ddl_rest_required_fields(request_type=spanner_database_admin.GetDatabaseDdlRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.GetDatabaseDdlResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_database_ddl(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_database_ddl_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_database_ddl._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", ))) + + +def test_get_database_ddl_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.GetDatabaseDdlResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_database_ddl(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1]) + + +def test_get_database_ddl_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_database_ddl( + spanner_database_admin.GetDatabaseDdlRequest(), + database='database_value', + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
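+ # SetIamPolicyRequest comes from iam_policy_pb2 and is a plain protobuf
+ # message rather than a proto-plus wrapper, so no request_type.pb(request)
+ # conversion is needed here; the request already is its own pb form.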
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) + + +def test_set_iam_policy_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", ))) + + +def test_get_iam_policy_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + jsonified_request["permissions"] = 'permissions_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == 'permissions_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) + + +def test_test_iam_permissions_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + permissions=['permissions_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +def test_create_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc + + request = {} + client.create_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_rest_required_fields(request_type=gsad_backup.CreateBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "backupId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == request_init["backup_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["backupId"] = 'backup_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("backup_id", "encryption_config", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == 'backup_id_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
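+ # backup_id is a required field that travels as a query parameter, which is
+ # why it got special treatment above: MessageToJson drops it while it still
+ # holds the proto default (""), and _get_unset_required_fields() is expected
+ # to put it back; the expected_params assertion below accordingly lists
+ # ("backupId", "") alongside the $alt system parameter.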
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_backup(request) + + expected_params = [ + ( + "backupId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("backupId", "encryptionConfig", )) & set(("parent", "backupId", "backup", ))) + + +def test_create_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1]) + + +def test_create_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup( + gsad_backup.CreateBackupRequest(), + parent='parent_value', + backup=gsad_backup.Backup(database='database_value'), + backup_id='backup_id_value', + ) + + +def test_copy_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc + + request = {} + client.copy_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_id"] = "" + request_init["source_backup"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["backupId"] = 'backup_id_value' + jsonified_request["sourceBackup"] = 'source_backup_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "backupId" in jsonified_request + assert jsonified_request["backupId"] == 'backup_id_value' + assert "sourceBackup" in jsonified_request + assert jsonified_request["sourceBackup"] == 'source_backup_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.copy_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_copy_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.copy_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "backupId", "sourceBackup", "expireTime", ))) + + +def test_copy_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.copy_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups:copy" % client.transport._host, args[1]) + + +def test_copy_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.copy_backup( + backup.CopyBackupRequest(), + parent='parent_value', + backup_id='backup_id_value', + source_backup='source_backup_value', + expire_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_get_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + + request = {} + client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
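+ # For GET (and DELETE) http rules the faked transcode result carries no
+ # 'body' key, mirroring real transcoding, where only rules with a body
+ # mapping (see the POST/PATCH tests above) serialize part of the request
+ # into an HTTP payload.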
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) + + +def test_get_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup( + backup.GetBackupRequest(), + name='name_value', + ) + + +def test_update_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + + request = {} + client.update_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_rest_required_fields(request_type=gsad_backup.UpdateBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsad_backup.Backup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("backup", "updateMask", ))) + + +def test_update_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup.Backup() + + # get arguments that satisfy an http rule for this method + sample_request = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{backup.name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) + + +def test_update_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup( + gsad_backup.UpdateBackupRequest(), + backup=gsad_backup.Backup(database='database_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + + request = {} + client.delete_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
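+            # DeleteBackup maps to HTTP DELETE, so unlike the body-carrying
+            # methods above the stubbed transcode result gets no 'body' key.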
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_backup(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_backup_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_backup_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) + + +def test_delete_backup_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backup.DeleteBackupRequest(), + name='name_value', + ) + + +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backups in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
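+        # _wrapped_methods is keyed by the bound transport method, so the
+        # assignment below swaps the mock in for every later list_backups
+        # call made through this client.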
+ client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + + request = {} + client.list_backups(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = backup.ListBackupsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_backups(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_backups_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_backups._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_backups_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = backup.ListBackupsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = backup.ListBackupsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_backups(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1])
+
+
+def test_list_backups_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_backups(
+            backup.ListBackupsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_backups_rest_pager(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
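+        # The four pages below hold six backups in total and are served
+        # twice: once for iterating the pager's items and once for
+        # iterating its raw pages and their tokens.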
+        # Set the response as a series of pages
+        response = (
+            backup.ListBackupsResponse(
+                backups=[
+                    backup.Backup(),
+                    backup.Backup(),
+                    backup.Backup(),
+                ],
+                next_page_token='abc',
+            ),
+            backup.ListBackupsResponse(
+                backups=[],
+                next_page_token='def',
+            ),
+            backup.ListBackupsResponse(
+                backups=[
+                    backup.Backup(),
+                ],
+                next_page_token='ghi',
+            ),
+            backup.ListBackupsResponse(
+                backups=[
+                    backup.Backup(),
+                    backup.Backup(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(backup.ListBackupsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        pager = client.list_backups(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, backup.Backup)
+                for i in results)
+
+        pages = list(client.list_backups(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_restore_database_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.restore_database in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc
+
+        request = {}
+        client.restore_database(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.restore_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_restore_database_rest_required_fields(request_type=spanner_database_admin.RestoreDatabaseRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["database_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["databaseId"] = 'database_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == 'database_id_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
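+            # RestoreDatabase is a long-running method, so the faked
+            # response below is a google.longrunning Operation; being a
+            # plain protobuf message it needs no proto-plus .pb()
+            # conversion before MessageToJson.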
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.restore_database(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_restore_database_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.restore_database._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "databaseId", ))) + + +def test_restore_database_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + database_id='database_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.restore_database(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases:restore" % client.transport._host, args[1]) + + +def test_restore_database_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.restore_database( + spanner_database_admin.RestoreDatabaseRequest(), + parent='parent_value', + database_id='database_id_value', + backup='backup_value', + ) + + +def test_list_database_operations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_database_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc + + request = {} + client.list_database_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_database_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_database_operations_rest_required_fields(request_type=spanner_database_admin.ListDatabaseOperationsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_database_operations(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_database_operations_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_database_operations._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_database_operations_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_database_admin.ListDatabaseOperationsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_database_operations(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databaseOperations" % client.transport._host, args[1])
+
+
+def test_list_database_operations_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_database_operations(
+            spanner_database_admin.ListDatabaseOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_database_operations_rest_pager(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_database_admin.ListDatabaseOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_database_admin.ListDatabaseOperationsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        pager = client.list_database_operations(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                for i in results)
+
+        pages = list(client.list_database_operations(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_list_backup_operations_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_backup_operations in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc
+
+        request = {}
+        client.list_backup_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_backup_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_operations_rest_required_fields(request_type=backup.ListBackupOperationsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = backup.ListBackupOperationsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_backup_operations(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_backup_operations_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_backup_operations._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_backup_operations_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = backup.ListBackupOperationsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = backup.ListBackupOperationsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_backup_operations(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backupOperations" % client.transport._host, args[1])
+
+
+def test_list_backup_operations_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_backup_operations(
+            backup.ListBackupOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_backup_operations_rest_pager(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            backup.ListBackupOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            backup.ListBackupOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            backup.ListBackupOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            backup.ListBackupOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(backup.ListBackupOperationsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        pager = client.list_backup_operations(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                for i in results)
+
+        pages = list(client.list_backup_operations(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_list_database_roles_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_database_roles in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc
+
+        request = {}
+        client.list_database_roles(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_database_roles(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_database_roles_rest_required_fields(request_type=spanner_database_admin.ListDatabaseRolesRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseRolesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_database_roles(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_database_roles_rest_unset_required_fields():
+    transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_database_roles._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_database_roles_rest_flattened():
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_database_admin.ListDatabaseRolesResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_database_roles(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles" % client.transport._host, args[1])
+
+
+def test_list_database_roles_rest_flattened_error(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_database_roles(
+            spanner_database_admin.ListDatabaseRolesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_database_roles_rest_pager(transport: str = 'rest'):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[],
+                next_page_token='def',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_database_admin.ListDatabaseRolesResponse(
+                database_roles=[
+                    spanner_database_admin.DatabaseRole(),
+                    spanner_database_admin.DatabaseRole(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_database_admin.ListDatabaseRolesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+        pager = client.list_database_roles(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_database_admin.DatabaseRole)
+                for i in results)
+
+        pages = list(client.list_database_roles(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_add_split_points_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = DatabaseAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.add_split_points in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc
+
+        request = {}
+        client.add_split_points(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.add_split_points(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_add_split_points_rest_required_fields(request_type=spanner_database_admin.AddSplitPointsRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["database"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["database"] = 'database_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "database" in jsonified_request + assert jsonified_request["database"] == 'database_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.AddSplitPointsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.add_split_points(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_add_split_points_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.add_split_points._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("database", "splitPoints", ))) + + +def test_add_split_points_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
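+    # The flattened-call tests patch the transport session itself and then
+    # use path_template.validate to confirm the flattened arguments were
+    # expanded into the expected REST URI.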
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.AddSplitPointsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.add_split_points(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints" % client.transport._host, args[1]) + + +def test_add_split_points_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.add_split_points( + spanner_database_admin.AddSplitPointsRequest(), + database='database_value', + split_points=[spanner_database_admin.SplitPoints(table='table_value')], + ) + + +def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc + + request = {} + client.create_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.CreateBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_schedule_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "backupScheduleId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "backupScheduleId" in jsonified_request + assert jsonified_request["backupScheduleId"] == request_init["backup_schedule_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["backupScheduleId"] = 'backup_schedule_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("backup_schedule_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "backupScheduleId" in jsonified_request + assert jsonified_request["backupScheduleId"] == 'backup_schedule_id_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
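+            # backup_schedule_id is a required query parameter rather than
+            # a path or body field, which is why expected_params later
+            # includes a ("backupScheduleId", "") entry alongside "$alt".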
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_backup_schedule(request) + + expected_params = [ + ( + "backupScheduleId", + "", + ), + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_backup_schedule_rest_unset_required_fields(): + transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) + assert set(unset_fields) == (set(("backupScheduleId", )) & set(("parent", "backupScheduleId", "backupSchedule", ))) + + +def test_create_backup_schedule_rest_flattened(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_backup_schedule(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1]) + + +def test_create_backup_schedule_rest_flattened_error(transport: str = 'rest'): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_schedule( + gsad_backup_schedule.CreateBackupScheduleRequest(), + parent='parent_value', + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + backup_schedule_id='backup_schedule_id_value', + ) + + +def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc + + request = {} + client.get_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_schedule_rest_required_fields(request_type=backup_schedule.GetBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup_schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
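+ # GetBackupSchedule maps to an HTTP GET, so the stubbed transcode()
+ # result below carries no 'body' entry; the populated request fields
+ # are expected to travel in the query string instead.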
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = backup_schedule.BackupSchedule.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.get_backup_schedule(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_get_backup_schedule_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.get_backup_schedule._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_backup_schedule_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = backup_schedule.BackupSchedule()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ name='name_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = backup_schedule.BackupSchedule.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.get_backup_schedule(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1])
+
+
+def test_get_backup_schedule_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + client.get_backup_schedule( + backup_schedule.GetBackupScheduleRequest(), + name='name_value', + ) + + +def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc + + request = {} + client.update_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
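+ # UpdateBackupSchedule maps to an HTTP PATCH with the backup schedule
+ # in the request body, so the stubbed transcode() result below attaches
+ # a 'body' entry; update_mask is the lone non-body parameter checked
+ # against the query params above.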
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "patch",
+ 'query_params': pb_request,
+ }
+ transcode_result['body'] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = gsad_backup_schedule.BackupSchedule.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.update_backup_schedule(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_update_backup_schedule_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.update_backup_schedule._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("updateMask", )) & set(("backupSchedule", "updateMask", )))
+
+
+def test_update_backup_schedule_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = gsad_backup_schedule.BackupSchedule()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = gsad_backup_schedule.BackupSchedule.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.update_backup_schedule(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1])
+
+
+def test_update_backup_schedule_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + client.update_backup_schedule( + gsad_backup_schedule.UpdateBackupScheduleRequest(), + backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_backup_schedule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc + + request = {} + client.delete_backup_schedule(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_backup_schedule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_schedule_rest_required_fields(request_type=backup_schedule.DeleteBackupScheduleRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
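+ # DeleteBackupSchedule maps to an HTTP DELETE that returns an empty
+ # response body, so the stubbed transcode() result below has no 'body'
+ # entry and the faked HTTP response content is an empty string.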
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "delete",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ''
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.delete_backup_schedule(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_delete_backup_schedule_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.delete_backup_schedule._get_unset_required_fields({})
+ assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_delete_backup_schedule_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ name='name_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ json_return_value = ''
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.delete_backup_schedule(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1])
+
+
+def test_delete_backup_schedule_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_backup_schedule(
+ backup_schedule.DeleteBackupScheduleRequest(),
+ name='name_value',
+ )
+
+
+def test_list_backup_schedules_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_backup_schedules in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc + + request = {} + client.list_backup_schedules(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_backup_schedules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_schedules_rest_required_fields(request_type=backup_schedule.ListBackupSchedulesRequest): + transport_class = transports.DatabaseAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backup_schedule.ListBackupSchedulesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
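+ # ListBackupSchedules maps to an HTTP GET; pageSize and pageToken are
+ # optional query parameters (checked above), so only the '$alt' system
+ # parameter is expected on this minimal request.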
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ 'uri': 'v1/sample_method',
+ 'method': "get",
+ 'query_params': pb_request,
+ }
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.list_backup_schedules(request)
+
+ expected_params = [
+ ('$alt', 'json;enum-encoding=int')
+ ]
+ actual_params = req.call_args.kwargs['params']
+ assert expected_params == actual_params
+
+
+def test_list_backup_schedules_rest_unset_required_fields():
+ transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+ unset_fields = transport.list_backup_schedules._get_unset_required_fields({})
+ assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_backup_schedules_rest_flattened():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = backup_schedule.ListBackupSchedulesResponse()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent='parent_value',
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.list_backup_schedules(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1
+ _, args, _ = req.mock_calls[0]
+ assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1])
+
+
+def test_list_backup_schedules_rest_flattened_error(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_backup_schedules(
+ backup_schedule.ListBackupSchedulesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_backup_schedules_rest_pager(transport: str = 'rest'):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
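+ # The canned pages below model a server returning 3, 0, 1, and 2 items
+ # across four pages, ending with an empty next_page_token; the sequence
+ # is doubled because the pager is iterated twice (once for items, once
+ # for pages).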
+ # Set the response as a series of pages
+ response = (
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[
+ backup_schedule.BackupSchedule(),
+ backup_schedule.BackupSchedule(),
+ backup_schedule.BackupSchedule(),
+ ],
+ next_page_token='abc',
+ ),
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[],
+ next_page_token='def',
+ ),
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[
+ backup_schedule.BackupSchedule(),
+ ],
+ next_page_token='ghi',
+ ),
+ backup_schedule.ListBackupSchedulesResponse(
+ backup_schedules=[
+ backup_schedule.BackupSchedule(),
+ backup_schedule.BackupSchedule(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(backup_schedule.ListBackupSchedulesResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for _ in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+
+ pager = client.list_backup_schedules(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, backup_schedule.BackupSchedule)
+ for i in results)
+
+ pages = list(client.list_backup_schedules(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_internal_update_graph_operation_rest_no_http_options():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = spanner_database_admin.InternalUpdateGraphOperationRequest()
+ with pytest.raises(RuntimeError):
+ client.internal_update_graph_operation(request)
+
+
+def test_internal_update_graph_operation_rest_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='rest'
+ )
+ # Since a `google.api.http` annotation is required for using a rest transport
+ # method, this should error.
+ with pytest.raises(NotImplementedError) as not_implemented_error:
+ client.internal_update_graph_operation({})
+ assert (
+ "Method InternalUpdateGraphOperation is not available over REST transport"
+ in str(not_implemented_error.value)
+ )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.DatabaseAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.DatabaseAdminGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = DatabaseAdminClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a transport instance.
+ transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DatabaseAdminClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DatabaseAdminClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DatabaseAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DatabaseAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DatabaseAdminGrpcTransport, + transports.DatabaseAdminGrpcAsyncIOTransport, + transports.DatabaseAdminRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = DatabaseAdminClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_databases_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabasesResponse() + client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + call.return_value = spanner_database_admin.Database() + client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_ddl_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_drop_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + call.return_value = None + client.drop_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.DropDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_database_ddl_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + call.return_value = spanner_database_admin.GetDatabaseDdlResponse() + client.get_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_backup(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.CreateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.copy_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.CopyBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + call.return_value = backup.Backup() + client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + call.return_value = gsad_backup.Backup() + client.update_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + call.return_value = None + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backups_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + call.return_value = backup.ListBackupsResponse() + client.list_backups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_database_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_operations_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + client.list_database_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_operations_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + call.return_value = backup.ListBackupOperationsResponse() + client.list_backup_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_roles_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + call.return_value = spanner_database_admin.ListDatabaseRolesResponse() + client.list_database_roles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseRolesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_add_split_points_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + call.return_value = spanner_database_admin.AddSplitPointsResponse() + client.add_split_points(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.AddSplitPointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + call.return_value = backup_schedule.BackupSchedule() + client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + call.return_value = gsad_backup_schedule.BackupSchedule() + client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_schedule_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + call.return_value = None + client.delete_backup_schedule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.DeleteBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_schedules_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + call.return_value = backup_schedule.ListBackupSchedulesResponse() + client.list_backup_schedules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.ListBackupSchedulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_internal_update_graph_operation_empty_call_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.internal_update_graph_operation), + '__call__') as call: + call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse() + client.internal_update_graph_operation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DatabaseAdminAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_databases_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + )) + await client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database( + name='name_value', + state=spanner_database_admin.Database.State.CREATING, + version_retention_period='version_retention_period_value', + default_leader='default_leader_value', + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + enable_drop_protection=True, + reconciling=True, + )) + await client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_database_ddl_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_database_ddl(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_drop_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.drop_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.DropDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_database_ddl_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse( + statements=['statements_value'], + proto_descriptors=b'proto_descriptors_blob', + )) + await client.get_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + await client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + await client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.CreateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_copy_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.copy_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.CopyBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + )) + await client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=gsad_backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + )) + await client.update_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backups_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse( + next_page_token='next_page_token_value', + )) + await client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_restore_database_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_database_operations_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + )) + await client.list_database_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_operations_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + )) + await client.list_backup_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_database_roles_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse( + next_page_token='next_page_token_value', + )) + await client.list_database_roles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseRolesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_add_split_points_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse( + )) + await client.add_split_points(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.AddSplitPointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( + name='name_value', + )) + await client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule( + name='name_value', + )) + await client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( + name='name_value', + )) + await client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_schedule_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_schedule), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.DeleteBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_schedules_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_schedules), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse( + next_page_token='next_page_token_value', + )) + await client.list_backup_schedules(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.ListBackupSchedulesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_internal_update_graph_operation_empty_call_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.internal_update_graph_operation), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse( + )) + await client.internal_update_graph_operation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DatabaseAdminClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_databases_rest_bad_request(request_type=spanner_database_admin.ListDatabasesRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_databases(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabasesRequest, + dict, +]) +def test_list_databases_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabasesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_databases(request) + + # Establish that the response is the type that we expect. 
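+ # The REST transport decodes the mocked JSON body back into a
+ # ListDatabasesResponse, which the client method wraps in a pager.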
+ assert isinstance(response, pagers.ListDatabasesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_databases_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_databases") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.ListDatabasesRequest.pb(spanner_database_admin.ListDatabasesRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = spanner_database_admin.ListDatabasesResponse.to_json(spanner_database_admin.ListDatabasesResponse())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.ListDatabasesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = spanner_database_admin.ListDatabasesResponse()
+ post_with_metadata.return_value = spanner_database_admin.ListDatabasesResponse(), metadata
+
+ client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_create_database_rest_bad_request(request_type=spanner_database_admin.CreateDatabaseRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/instances/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.create_database(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.CreateDatabaseRequest,
+ dict,
+])
+def test_create_database_rest_call_success(request_type):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/instances/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.create_database(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_database_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_database") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.CreateDatabaseRequest.pb(spanner_database_admin.CreateDatabaseRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.CreateDatabaseRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_get_database_rest_bad_request(request_type=spanner_database_admin.GetDatabaseRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
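+ # A 400 status from the underlying HTTP session should surface as
+ # core_exceptions.BadRequest.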
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.GetDatabaseRequest, + dict, +]) +def test_get_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.Database( + name='name_value', + state=spanner_database_admin.Database.State.CREATING, + version_retention_period='version_retention_period_value', + default_leader='default_leader_value', + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + enable_drop_protection=True, + reconciling=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.Database.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_database(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_database_admin.Database)
+ assert response.name == 'name_value'
+ assert response.state == spanner_database_admin.Database.State.CREATING
+ assert response.version_retention_period == 'version_retention_period_value'
+ assert response.default_leader == 'default_leader_value'
+ assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL
+ assert response.enable_drop_protection is True
+ assert response.reconciling is True
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_database_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.GetDatabaseRequest.pb(spanner_database_admin.GetDatabaseRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = spanner_database_admin.Database.to_json(spanner_database_admin.Database())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.GetDatabaseRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = spanner_database_admin.Database()
+ post_with_metadata.return_value = spanner_database_admin.Database(), metadata
+
+ client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_database_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.UpdateDatabaseRequest, + dict, +]) +def test_update_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} + request_init["database"] = {'name': 'projects/sample1/instances/sample2/databases/sample3', 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'restore_info': {'source_type': 1, 'backup_info': {'backup': 'backup_value', 'version_time': {}, 'create_time': {}, 'source_database': 'source_database_value'}}, 'encryption_config': {'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'encryption_info': [{'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}], 'version_retention_period': 'version_retention_period_value', 'earliest_version_time': {}, 'default_leader': 'default_leader_value', 'database_dialect': 1, 'enable_drop_protection': True, 'reconciling': True} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = spanner_database_admin.UpdateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
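+ # A proto-plus message type keeps its fields under `meta.fields`, while
+ # a raw protobuf type exposes a `DESCRIPTOR`; the hasattr check below
+ # tells the two cases apart.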
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_database(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_database_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.UpdateDatabaseRequest.pb(spanner_database_admin.UpdateDatabaseRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.UpdateDatabaseRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_database_ddl_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseDdlRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.update_database_ddl(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.UpdateDatabaseDdlRequest,
+ dict,
+])
+def test_update_database_ddl_rest_call_success(request_type):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
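+ # Long-running methods return an api_core Operation future; the mocked
+ # JSON body below encodes the underlying operations_pb2.Operation.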
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.update_database_ddl(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_database_ddl_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database_ddl") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.UpdateDatabaseDdlRequest.pb(spanner_database_admin.UpdateDatabaseDdlRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.UpdateDatabaseDdlRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.update_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_drop_database_rest_bad_request(request_type=spanner_database_admin.DropDatabaseRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.drop_database(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_database_admin.DropDatabaseRequest,
+ dict,
+])
+def test_drop_database_rest_call_success(request_type):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ''
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.drop_database(request)
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_drop_database_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_drop_database") as pre:
+ pre.assert_not_called()
+ pb_message = spanner_database_admin.DropDatabaseRequest.pb(spanner_database_admin.DropDatabaseRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ request = spanner_database_admin.DropDatabaseRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.drop_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+
+
+def test_get_database_ddl_rest_bad_request(request_type=spanner_database_admin.GetDatabaseDdlRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_database_ddl(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.GetDatabaseDdlRequest, + dict, +]) +def test_get_database_ddl_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.GetDatabaseDdlResponse( + statements=['statements_value'], + proto_descriptors=b'proto_descriptors_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_database_ddl(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse)
+ assert response.statements == ['statements_value']
+ assert response.proto_descriptors == b'proto_descriptors_blob'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_database_ddl_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database_ddl") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.GetDatabaseDdlRequest.pb(spanner_database_admin.GetDatabaseDdlRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = spanner_database_admin.GetDatabaseDdlResponse.to_json(spanner_database_admin.GetDatabaseDdlResponse())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.GetDatabaseDdlRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = spanner_database_admin.GetDatabaseDdlResponse()
+ post_with_metadata.return_value = spanner_database_admin.GetDatabaseDdlResponse(), metadata
+
+ client.get_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.set_iam_policy(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ iam_policy_pb2.SetIamPolicyRequest,
+ dict,
+])
+def test_set_iam_policy_rest_call_success(request_type):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = policy_pb2.Policy(
+ version=774,
+ etag=b'etag_blob',
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.set_iam_policy(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+ assert response.version == 774
+ assert response.etag == b'etag_blob'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_set_iam_policy_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_set_iam_policy") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = iam_policy_pb2.SetIamPolicyRequest()
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(policy_pb2.Policy())
+ req.return_value.content = return_value
+
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = policy_pb2.Policy()
+ post_with_metadata.return_value = policy_pb2.Policy(), metadata
+
+ client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+
post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value.content = return_value + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
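+        # Note: iam_policy_pb2 messages are plain protobuf (not proto-plus), so
+        # json_format.MessageToJson can serialize return_value directly, without
+        # the `.pb()` conversion the proto-plus responses elsewhere require.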
+ return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) + req.return_value.content = return_value + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_backup_rest_bad_request(request_type=gsad_backup.CreateBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
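+    # (Patching requests' Session.request is enough here: the 400 status below is
+    # expected to be mapped by google.api_core's HTTP exception helpers to
+    # core_exceptions.BadRequest, which pytest.raises then catches.)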
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backup(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup.CreateBackupRequest, + dict, +]) +def test_create_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'name_value', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.CreateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
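+        # Worked example (hypothetical version skew): if the runtime protobuf's
+        # EncryptionInfo message lacked `kms_key_version`, the pair
+        # ('encryption_info', 'kms_key_version') would be missing from
+        # runtime_nested_fields below, and the cleanup loop would delete
+        # request_init["backup"]["encryption_info"]["kms_key_version"] before
+        # the request object is built.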
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup(request) + + # Establish that the response is the type that we expect. 
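+        # create_backup is a long-running operation, so the client returns a
+        # google.api_core.operation.Operation future wrapping the mocked proto.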
+        assert isinstance(response, operation.Operation)
+        assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_backup_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+        )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup") as post, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = gsad_backup.CreateBackupRequest.pb(gsad_backup.CreateBackupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = gsad_backup.CreateBackupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        # The transport runs the pre hook on the request, then both post hooks on
+        # the response, so each mock must record exactly one call.
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_copy_backup_rest_bad_request(request_type=backup.CopyBackupRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.copy_backup(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    backup.CopyBackupRequest,
+    dict,
+])
+def test_copy_backup_rest_call_success(request_type):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.copy_backup(request)
+
+        # Establish that the response is the type that we expect.
+        # copy_backup is long-running, so the Operation wrapper is checked rather
+        # than a plain response message.
+        assert isinstance(response, operation.Operation)
+        assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_copy_backup_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+        )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup") as post, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_copy_backup") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = backup.CopyBackupRequest.pb(backup.CopyBackupRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = backup.CopyBackupRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.copy_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_get_backup_rest_bad_request(request_type=backup.GetBackupRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup(request) + + +@pytest.mark.parametrize("request_type", [ + backup.GetBackupRequest, + dict, +]) +def test_get_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup(request) + + # Establish that the response is the type that we expect. 
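+        # (backup.Backup is a proto-plus message, which is why the mock response
+        # was converted with backup.Backup.pb() before JSON serialization above.)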
+ assert isinstance(response, backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.Backup.to_json(backup.Backup()) + req.return_value.content = return_value + + request = backup.GetBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.Backup() + post_with_metadata.return_value = backup.Backup(), metadata + + client.get_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_backup_rest_bad_request(request_type=gsad_backup.UpdateBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backup(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup.UpdateBackupRequest, + dict, +]) +def test_update_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} + request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'projects/sample1/instances/sample2/backups/sample3', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup.UpdateBackupRequest.meta.fields["backup"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] + else: + del request_init["backup"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup.Backup( + database='database_value', + name='name_value', + size_bytes=1089, + freeable_size_bytes=2006, + exclusive_size_bytes=2168, + state=gsad_backup.Backup.State.CREATING, + referencing_databases=['referencing_databases_value'], + database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, + referencing_backups=['referencing_backups_value'], + backup_schedules=['backup_schedules_value'], + incremental_backup_chain_id='incremental_backup_chain_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup.Backup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_backup(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsad_backup.Backup) + assert response.database == 'database_value' + assert response.name == 'name_value' + assert response.size_bytes == 1089 + assert response.freeable_size_bytes == 2006 + assert response.exclusive_size_bytes == 2168 + assert response.state == gsad_backup.Backup.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL + assert response.referencing_backups == ['referencing_backups_value'] + assert response.backup_schedules == ['backup_schedules_value'] + assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_backup_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gsad_backup.UpdateBackupRequest.pb(gsad_backup.UpdateBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gsad_backup.Backup.to_json(gsad_backup.Backup()) + req.return_value.content = return_value + + request = gsad_backup.UpdateBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gsad_backup.Backup() + post_with_metadata.return_value = gsad_backup.Backup(), metadata + + client.update_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_backup_rest_bad_request(request_type=backup.DeleteBackupRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_backup(request) + + +@pytest.mark.parametrize("request_type", [ + backup.DeleteBackupRequest, + dict, +]) +def test_delete_backup_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_backup(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_backup_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup") as pre: + pre.assert_not_called() + pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = backup.DeleteBackupRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_list_backups_rest_bad_request(request_type=backup.ListBackupsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backups(request) + + +@pytest.mark.parametrize("request_type", [ + backup.ListBackupsRequest, + dict, +]) +def test_list_backups_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.ListBackupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backups(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backups_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backups") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.ListBackupsResponse.to_json(backup.ListBackupsResponse()) + req.return_value.content = return_value + + request = backup.ListBackupsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.ListBackupsResponse() + post_with_metadata.return_value = backup.ListBackupsResponse(), metadata + + client.list_backups(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_restore_database_rest_bad_request(request_type=spanner_database_admin.RestoreDatabaseRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.restore_database(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.RestoreDatabaseRequest, + dict, +]) +def test_restore_database_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.restore_database(request) + + # Establish that the response is the type that we expect. 
+        assert isinstance(response, operation.Operation)
+        assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_restore_database_rest_interceptors(null_interceptor):
+    transport = transports.DatabaseAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+        )
+    client = DatabaseAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database") as post, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database_with_metadata") as post_with_metadata, \
+        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_restore_database") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_database_admin.RestoreDatabaseRequest.pb(spanner_database_admin.RestoreDatabaseRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_database_admin.RestoreDatabaseRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.restore_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_database_operations_rest_bad_request(request_type=spanner_database_admin.ListDatabaseOperationsRequest):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_database_operations(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_database_admin.ListDatabaseOperationsRequest,
+    dict,
+])
+def test_list_database_operations_rest_call_success(request_type):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseOperationsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_database_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatabaseOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_database_operations_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_operations") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_database_admin.ListDatabaseOperationsRequest.pb(spanner_database_admin.ListDatabaseOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_database_admin.ListDatabaseOperationsResponse.to_json(spanner_database_admin.ListDatabaseOperationsResponse()) + req.return_value.content = return_value + + request = spanner_database_admin.ListDatabaseOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.ListDatabaseOperationsResponse() + post_with_metadata.return_value = spanner_database_admin.ListDatabaseOperationsResponse(), metadata + + client.list_database_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_backup_operations_rest_bad_request(request_type=backup.ListBackupOperationsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backup_operations(request) + + +@pytest.mark.parametrize("request_type", [ + backup.ListBackupOperationsRequest, + dict, +]) +def test_list_backup_operations_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup.ListBackupOperationsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup.ListBackupOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_operations(request) + + # Establish that the response is the type that we expect. 
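+        # (The client wraps the raw ListBackupOperationsResponse in a pager;
+        # attribute access such as next_page_token is forwarded to the underlying
+        # response object.)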
+ assert isinstance(response, pagers.ListBackupOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_operations_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_operations") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backup.ListBackupOperationsRequest.pb(backup.ListBackupOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backup.ListBackupOperationsResponse.to_json(backup.ListBackupOperationsResponse()) + req.return_value.content = return_value + + request = backup.ListBackupOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backup.ListBackupOperationsResponse() + post_with_metadata.return_value = backup.ListBackupOperationsResponse(), metadata + + client.list_backup_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_database_roles_rest_bad_request(request_type=spanner_database_admin.ListDatabaseRolesRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_database_roles(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.ListDatabaseRolesRequest, + dict, +]) +def test_list_database_roles_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.ListDatabaseRolesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_database_roles(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDatabaseRolesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_database_roles_rest_interceptors(null_interceptor): + transport = transports.DatabaseAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), + ) + client = DatabaseAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles") as post, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_roles") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_database_admin.ListDatabaseRolesRequest.pb(spanner_database_admin.ListDatabaseRolesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_database_admin.ListDatabaseRolesResponse.to_json(spanner_database_admin.ListDatabaseRolesResponse()) + req.return_value.content = return_value + + request = spanner_database_admin.ListDatabaseRolesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_database_admin.ListDatabaseRolesResponse() + post_with_metadata.return_value = spanner_database_admin.ListDatabaseRolesResponse(), metadata + + client.list_database_roles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_add_split_points_rest_bad_request(request_type=spanner_database_admin.AddSplitPointsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.add_split_points(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_database_admin.AddSplitPointsRequest, + dict, +]) +def test_add_split_points_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_database_admin.AddSplitPointsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.add_split_points(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_database_admin.AddSplitPointsResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_add_split_points_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_add_split_points") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_database_admin.AddSplitPointsRequest.pb(spanner_database_admin.AddSplitPointsRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = spanner_database_admin.AddSplitPointsResponse.to_json(spanner_database_admin.AddSplitPointsResponse())
+ req.return_value.content = return_value
+
+ request = spanner_database_admin.AddSplitPointsRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = spanner_database_admin.AddSplitPointsResponse()
+ post_with_metadata.return_value = spanner_database_admin.AddSplitPointsResponse(), metadata
+
+ client.add_split_points(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_create_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.CreateBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backup_schedule(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.CreateBackupScheduleRequest, + dict, +]) +def test_create_backup_schedule_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request_init["backup_schedule"] = {'name': 'name_value', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup_schedule.CreateBackupScheduleRequest.meta.fields["backup_schedule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
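+ # (proto-plus messages expose their fields via `meta.fields`, while
+ # vanilla protobuf classes expose them via `DESCRIPTOR.fields`; the
+ # hasattr(field.message, "DESCRIPTOR") check below tells the two apart.)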
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup_schedule(request) + + # Establish that the response is the type that we expect. 
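+ # (The mock carries the JSON encoding of the BackupSchedule message; the
+ # REST transport should parse response.content back into a proto-plus
+ # BackupSchedule, which the assertions below verify.)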
+ assert isinstance(response, gsad_backup_schedule.BackupSchedule)
+ assert response.name == 'name_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup_schedule") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = gsad_backup_schedule.CreateBackupScheduleRequest.pb(gsad_backup_schedule.CreateBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule())
+ req.return_value.content = return_value
+
+ request = gsad_backup_schedule.CreateBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = gsad_backup_schedule.BackupSchedule()
+ post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata
+
+ client.create_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_get_backup_schedule_rest_bad_request(request_type=backup_schedule.GetBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup_schedule(request) + + +@pytest.mark.parametrize("request_type", [ + backup_schedule.GetBackupScheduleRequest, + dict, +]) +def test_get_backup_schedule_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup_schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup_schedule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backup_schedule.BackupSchedule)
+ assert response.name == 'name_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup_schedule") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = backup_schedule.GetBackupScheduleRequest.pb(backup_schedule.GetBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = backup_schedule.BackupSchedule.to_json(backup_schedule.BackupSchedule())
+ req.return_value.content = return_value
+
+ request = backup_schedule.GetBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = backup_schedule.BackupSchedule()
+ post_with_metadata.return_value = backup_schedule.BackupSchedule(), metadata
+
+ client.get_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_update_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backup_schedule(request) + + +@pytest.mark.parametrize("request_type", [ + gsad_backup_schedule.UpdateBackupScheduleRequest, + dict, +]) +def test_update_backup_schedule_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} + request_init["backup_schedule"] = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gsad_backup_schedule.UpdateBackupScheduleRequest.meta.fields["backup_schedule"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_schedule"][field])): + del request_init["backup_schedule"][field][i][subfield] + else: + del request_init["backup_schedule"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gsad_backup_schedule.BackupSchedule( + name='name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_backup_schedule(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gsad_backup_schedule.BackupSchedule)
+ assert response.name == 'name_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup_schedule") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(gsad_backup_schedule.UpdateBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule())
+ req.return_value.content = return_value
+
+ request = gsad_backup_schedule.UpdateBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = gsad_backup_schedule.BackupSchedule()
+ post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata
+
+ client.update_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_delete_backup_schedule_rest_bad_request(request_type=backup_schedule.DeleteBackupScheduleRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.delete_backup_schedule(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ backup_schedule.DeleteBackupScheduleRequest,
+ dict,
+])
+def test_delete_backup_schedule_rest_call_success(request_type):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = None
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ''
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.delete_backup_schedule(request)
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_backup_schedule_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup_schedule") as pre:
+ pre.assert_not_called()
+ pb_message = backup_schedule.DeleteBackupScheduleRequest.pb(backup_schedule.DeleteBackupScheduleRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ request = backup_schedule.DeleteBackupScheduleRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+
+ client.delete_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+
+
+def test_list_backup_schedules_rest_bad_request(request_type=backup_schedule.ListBackupSchedulesRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backup_schedules(request) + + +@pytest.mark.parametrize("request_type", [ + backup_schedule.ListBackupSchedulesRequest, + dict, +]) +def test_list_backup_schedules_rest_call_success(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = backup_schedule.ListBackupSchedulesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_schedules(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupSchedulesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_backup_schedules_rest_interceptors(null_interceptor):
+ transport = transports.DatabaseAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
+ )
+ client = DatabaseAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules") as post, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_schedules") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = backup_schedule.ListBackupSchedulesRequest.pb(backup_schedule.ListBackupSchedulesRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = backup_schedule.ListBackupSchedulesResponse.to_json(backup_schedule.ListBackupSchedulesResponse())
+ req.return_value.content = return_value
+
+ request = backup_schedule.ListBackupSchedulesRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = backup_schedule.ListBackupSchedulesResponse()
+ post_with_metadata.return_value = backup_schedule.ListBackupSchedulesResponse(), metadata
+
+ client.list_backup_schedules(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_internal_update_graph_operation_rest_error():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ with pytest.raises(NotImplementedError) as not_implemented_error:
+ client.internal_update_graph_operation({})
+ assert (
+ "Method InternalUpdateGraphOperation is not available over REST transport"
+ in str(not_implemented_error.value)
+ )
+
+
+def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest):
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type()
+ request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
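+ # (DeleteOperation maps to google.protobuf.Empty, which the client
+ # surfaces as None rather than an Empty message instance.)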
+ assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_databases_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_databases), + '__call__') as call: + client.list_databases(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabasesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_database), + '__call__') as call: + client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.CreateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database), + '__call__') as call: + client.get_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database), + '__call__') as call: + client.update_database(request=None) + + # Establish that the underlying stub method was called. 
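+ # (Each entry in mock_calls unpacks as (name, args, kwargs); args[0] is
+ # the request message the client built, expected to equal the default.)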
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_database_ddl_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_database_ddl), + '__call__') as call: + client.update_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_drop_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.drop_database), + '__call__') as call: + client.drop_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.DropDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_database_ddl_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_database_ddl), + '__call__') as call: + client.get_database_ddl(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.GetDatabaseDdlRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup), + '__call__') as call: + client.create_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.CreateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.copy_backup), + '__call__') as call: + client.copy_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.CopyBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup), + '__call__') as call: + client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup), + '__call__') as call: + client.update_backup(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup), + '__call__') as call: + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backups_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backups), + '__call__') as call: + client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_database_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.restore_database), + '__call__') as call: + client.restore_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.RestoreDatabaseRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_operations_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_operations), + '__call__') as call: + client.list_database_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_operations_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_operations), + '__call__') as call: + client.list_backup_operations(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup.ListBackupOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_database_roles_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_database_roles), + '__call__') as call: + client.list_database_roles(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.ListDatabaseRolesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_add_split_points_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.add_split_points), + '__call__') as call: + client.add_split_points(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_database_admin.AddSplitPointsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_schedule_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_schedule), + '__call__') as call: + client.create_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_schedule_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_schedule), + '__call__') as call: + client.get_backup_schedule(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backup_schedule.GetBackupScheduleRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_schedule_empty_call_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_schedule), + '__call__') as call: + client.update_backup_schedule(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_backup_schedule_empty_call_rest():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_backup_schedule),
+ '__call__') as call:
+ client.delete_backup_schedule(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = backup_schedule.DeleteBackupScheduleRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_backup_schedules_empty_call_rest():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_backup_schedules),
+ '__call__') as call:
+ client.list_backup_schedules(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = backup_schedule.ListBackupSchedulesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_internal_update_graph_operation_empty_call_rest():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.internal_update_graph_operation),
+ '__call__') as call:
+ client.internal_update_graph_operation(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest()
+
+ assert args[0] == request_msg
+
+
+def test_database_admin_rest_lro_client():
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ transport = client.transport
+
+ # Ensure that we have an api-core operations client.
+ assert isinstance(
+ transport.operations_client,
+ operations_v1.AbstractOperationsClient,
+ )
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = DatabaseAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ assert isinstance(
+ client.transport,
+ transports.DatabaseAdminGrpcTransport,
+ )
+
+def test_database_admin_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+ transport = transports.DatabaseAdminTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ credentials_file="credentials.json"
+ )
+
+
+def test_database_admin_base_transport():
+ # Instantiate the base transport.
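+ # (Patching __init__ to return None keeps the abstract base transport
+ # from running its real credential and scope setup, so its methods can
+ # be probed for NotImplementedError directly.)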
+    with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.DatabaseAdminTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'list_databases',
+        'create_database',
+        'get_database',
+        'update_database',
+        'update_database_ddl',
+        'drop_database',
+        'get_database_ddl',
+        'set_iam_policy',
+        'get_iam_policy',
+        'test_iam_permissions',
+        'create_backup',
+        'copy_backup',
+        'get_backup',
+        'update_backup',
+        'delete_backup',
+        'list_backups',
+        'restore_database',
+        'list_database_operations',
+        'list_backup_operations',
+        'list_database_roles',
+        'add_split_points',
+        'create_backup_schedule',
+        'get_backup_schedule',
+        'update_backup_schedule',
+        'delete_backup_schedule',
+        'list_backup_schedules',
+        'internal_update_graph_operation',
+        'get_operation',
+        'cancel_operation',
+        'delete_operation',
+        'list_operations',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Additionally, the LRO client (a property) should
+    # also raise NotImplementedError
+    with pytest.raises(NotImplementedError):
+        transport.operations_client
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_database_admin_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DatabaseAdminTransport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.admin',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_database_admin_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.DatabaseAdminTransport()
+        adc.assert_called_once()
+
+
+def test_database_admin_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        DatabaseAdminClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.admin',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DatabaseAdminGrpcTransport,
+        transports.DatabaseAdminGrpcAsyncIOTransport,
+    ],
+)
+def test_database_admin_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.admin',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DatabaseAdminGrpcTransport,
+        transports.DatabaseAdminGrpcAsyncIOTransport,
+        transports.DatabaseAdminRestTransport,
+    ],
+)
+def test_database_admin_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.DatabaseAdminGrpcTransport, grpc_helpers),
+        (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_database_admin_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "spanner.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.admin',
+            ),
+            scopes=["1", "2"],
+            default_host="spanner.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport])
+def test_database_admin_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_database_admin_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.DatabaseAdminRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_database_admin_host_no_port(transport_name):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'spanner.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://spanner.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_database_admin_host_with_port(transport_name):
+    client = DatabaseAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'spanner.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://spanner.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_database_admin_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = DatabaseAdminClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = DatabaseAdminClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_databases._session
+    session2 = client2.transport.list_databases._session
+    assert session1 != session2
+    session1 = client1.transport.create_database._session
+    session2 = client2.transport.create_database._session
+    assert session1 != session2
+    session1 = client1.transport.get_database._session
+    session2 = client2.transport.get_database._session
+    assert session1 != session2
+    session1 = client1.transport.update_database._session
+    session2 = client2.transport.update_database._session
+    assert session1 != session2
+    session1 = client1.transport.update_database_ddl._session
+    session2 = client2.transport.update_database_ddl._session
+    assert session1 != session2
+    session1 = client1.transport.drop_database._session
+    session2 = client2.transport.drop_database._session
+    assert session1 != session2
+    session1 = client1.transport.get_database_ddl._session
+    session2 = client2.transport.get_database_ddl._session
+    assert session1 != session2
+    session1 = client1.transport.set_iam_policy._session
+    session2 = client2.transport.set_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.get_iam_policy._session
+    session2 = client2.transport.get_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.test_iam_permissions._session
+    session2 = client2.transport.test_iam_permissions._session
+    assert session1 != session2
+    session1 = client1.transport.create_backup._session
+    session2 = client2.transport.create_backup._session
+    assert session1 != session2
+    session1 = client1.transport.copy_backup._session
+    session2 = client2.transport.copy_backup._session
+    assert session1 != session2
+    session1 = client1.transport.get_backup._session
+    session2 = client2.transport.get_backup._session
+    assert session1 != session2
+    session1 = client1.transport.update_backup._session
+    session2 = client2.transport.update_backup._session
+    assert session1 != session2
+    session1 = client1.transport.delete_backup._session
+    session2 = client2.transport.delete_backup._session
+    assert session1 != session2
+    session1 = client1.transport.list_backups._session
+    session2 = client2.transport.list_backups._session
+    assert session1 != session2
+    session1 = client1.transport.restore_database._session
+    session2 = client2.transport.restore_database._session
+    assert session1 != session2
+    session1 = client1.transport.list_database_operations._session
+    session2 = client2.transport.list_database_operations._session
+    assert session1 != session2
+    session1 = client1.transport.list_backup_operations._session
+    session2 = client2.transport.list_backup_operations._session
+    assert session1 != session2
+    session1 = client1.transport.list_database_roles._session
+    session2 = client2.transport.list_database_roles._session
+    assert session1 != session2
+    session1 = client1.transport.add_split_points._session
+    session2 = client2.transport.add_split_points._session
+    assert session1 != session2
+    session1 = client1.transport.create_backup_schedule._session
+    session2 = client2.transport.create_backup_schedule._session
+    assert session1 != session2
+    session1 = client1.transport.get_backup_schedule._session
+    session2 = client2.transport.get_backup_schedule._session
+    assert session1 != session2
+    session1 = client1.transport.update_backup_schedule._session
+    session2 = client2.transport.update_backup_schedule._session
+    assert session1 != session2
+    session1 = client1.transport.delete_backup_schedule._session
+    session2 = client2.transport.delete_backup_schedule._session
+    assert session1 != session2
+    session1 = client1.transport.list_backup_schedules._session
+    session2 = client2.transport.list_backup_schedules._session
+    assert session1 != session2
+    session1 = client1.transport.internal_update_graph_operation._session
+    session2 = client2.transport.internal_update_graph_operation._session
+    assert session1 != session2
+def test_database_admin_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DatabaseAdminGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_database_admin_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.DatabaseAdminGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport])
+def test_database_admin_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) +def test_database_admin_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_database_admin_grpc_lro_client(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_database_admin_grpc_lro_async_client(): + client = DatabaseAdminAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_backup_path(): + project = "squid" + instance = "clam" + backup = "whelk" + expected = "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) + actual = DatabaseAdminClient.backup_path(project, instance, backup) + assert expected == actual + + +def test_parse_backup_path(): + expected = { + "project": "octopus", + "instance": "oyster", + "backup": "nudibranch", + } + path = DatabaseAdminClient.backup_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_backup_path(path) + assert expected == actual + +def test_backup_schedule_path(): + project = "cuttlefish" + instance = "mussel" + database = "winkle" + schedule = "nautilus" + expected = "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, ) + actual = DatabaseAdminClient.backup_schedule_path(project, instance, database, schedule) + assert expected == actual + + +def test_parse_backup_schedule_path(): + expected = { + "project": "scallop", + "instance": "abalone", + "database": "squid", + "schedule": "clam", + } + path = DatabaseAdminClient.backup_schedule_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_backup_schedule_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "whelk" + location = "octopus" + key_ring = "oyster" + crypto_key = "nudibranch" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + actual = DatabaseAdminClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "key_ring": "winkle", + "crypto_key": "nautilus", + } + path = DatabaseAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_crypto_key_path(path) + assert expected == actual + +def test_crypto_key_version_path(): + project = "scallop" + location = "abalone" + key_ring = "squid" + crypto_key = "clam" + crypto_key_version = "whelk" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) + actual = DatabaseAdminClient.crypto_key_version_path(project, location, key_ring, crypto_key, crypto_key_version) + assert expected == actual + + +def test_parse_crypto_key_version_path(): + expected = { + "project": "octopus", + "location": "oyster", + "key_ring": "nudibranch", + "crypto_key": "cuttlefish", + "crypto_key_version": "mussel", + } + path = DatabaseAdminClient.crypto_key_version_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_crypto_key_version_path(path) + assert expected == actual + +def test_database_path(): + project = "winkle" + instance = "nautilus" + database = "scallop" + expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) + actual = DatabaseAdminClient.database_path(project, instance, database) + assert expected == actual + + +def test_parse_database_path(): + expected = { + "project": "abalone", + "instance": "squid", + "database": "clam", + } + path = DatabaseAdminClient.database_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_database_path(path) + assert expected == actual + +def test_database_role_path(): + project = "whelk" + instance = "octopus" + database = "oyster" + role = "nudibranch" + expected = "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, ) + actual = DatabaseAdminClient.database_role_path(project, instance, database, role) + assert expected == actual + + +def test_parse_database_role_path(): + expected = { + "project": "cuttlefish", + "instance": "mussel", + "database": "winkle", + "role": "nautilus", + } + path = DatabaseAdminClient.database_role_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_database_role_path(path) + assert expected == actual + +def test_instance_path(): + project = "scallop" + instance = "abalone" + expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) + actual = DatabaseAdminClient.instance_path(project, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "squid", + "instance": "clam", + } + path = DatabaseAdminClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_instance_path(path) + assert expected == actual + +def test_instance_partition_path(): + project = "whelk" + instance = "octopus" + instance_partition = "oyster" + expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) + actual = DatabaseAdminClient.instance_partition_path(project, instance, instance_partition) + assert expected == actual + + +def test_parse_instance_partition_path(): + expected = { + "project": "nudibranch", + "instance": "cuttlefish", + "instance_partition": "mussel", + } + path = DatabaseAdminClient.instance_partition_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_instance_partition_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DatabaseAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = DatabaseAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format(folder=folder, ) + actual = DatabaseAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = DatabaseAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DatabaseAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = DatabaseAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format(project=project, ) + actual = DatabaseAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = DatabaseAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DatabaseAdminClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DatabaseAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = DatabaseAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DatabaseAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: + transport_class = DatabaseAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DatabaseAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = DatabaseAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport), + (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc new file mode 100644 index 0000000000..040d100fc8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/spanner_admin_instance/__init__.py + google/cloud/spanner_admin_instance/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/spanner_admin_instance/v1/.flake8 b/owl-bot-staging/spanner_admin_instance/v1/.flake8 new file mode 100644 index 0000000000..90316de214 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/.flake8 @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +[flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 +exclude = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint + **/gapic/** + **/services/** + **/types/** + # Exclude Protobuf gencode + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/spanner_admin_instance/v1/LICENSE b/owl-bot-staging/spanner_admin_instance/v1/LICENSE new file mode 100644 index 0000000000..d645695673 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in new file mode 100644 index 0000000000..dae249ec89 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+include README.rst LICENSE
+recursive-include google *.py *.pyi *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/owl-bot-staging/spanner_admin_instance/v1/README.rst b/owl-bot-staging/spanner_admin_instance/v1/README.rst
new file mode 100644
index 0000000000..7123886f79
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/README.rst
@@ -0,0 +1,143 @@
+Python Client for Google Cloud Spanner Admin Instance API
+==========================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Spanner Admin Instance API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
+
+
+Logging
+-------
+
+This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
+Note the following:
+
+#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
+#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
+#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
+
+
+Simple, environment-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
+logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
+messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
+event.
+
+A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
+
+- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
+
+**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
+
+
+Examples
+^^^^^^^^
+
+- Enabling the default handler for all Google-based loggers
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
+
+- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: console
+
+   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
+
+
+Advanced, code-based configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You can also configure a valid logging scope using Python's standard :code:`logging` mechanism.
+
+
+Examples
+^^^^^^^^
+
+- Configuring a handler for all Google-based loggers
+
+.. code-block:: python
+
+    import logging
+
+    from google.cloud import spanner_admin_instance_v1
+
+    base_logger = logging.getLogger("google")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
+
+.. code-block:: python
+
+    import logging
+
+    from google.cloud import spanner_admin_instance_v1
+
+    base_logger = logging.getLogger("google.cloud.library_v1")
+    base_logger.addHandler(logging.StreamHandler())
+    base_logger.setLevel(logging.DEBUG)
+
+
+Logging details
+~~~~~~~~~~~~~~~
+
+#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
+   logger from the :code:`google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code (see the sketch after this list).
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
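+
+As a minimal sketch of the propagation behavior described in point 1 above (the
+root-level :code:`logging.basicConfig` call here is an illustrative choice, not a
+requirement):
+
+.. code-block:: python
+
+    import logging
+
+    # Give the root logger a handler of your own.
+    logging.basicConfig(level=logging.DEBUG)
+
+    # The "google" logger does not propagate to the root logger by default;
+    # opt back in so the root handler also sees the library's logging events.
+    logging.getLogger("google").propagate = True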
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css
new file mode 100644
index 0000000000..b0a295464b
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+    border-color: red;
+    border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html
new file mode 100644
index 0000000000..95e9c77fcf
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+              Library versions released prior to that date will continue to be available. For more information please
+              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py new file mode 100644 index 0000000000..4e0af4e74e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py @@ -0,0 +1,385 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-spanner-admin-instance documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.5.0" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-spanner-admin-instance" +copyright = u"2025, Google, LLC" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-instance", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-spanner-admin-instance-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-spanner-admin-instance.tex", + u"google-cloud-spanner-admin-instance Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (
+        root_doc,
+        "google-cloud-spanner-admin-instance",
+        "google-cloud-spanner-admin-instance Documentation",
+        [author],
+        1,
+    )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+    (
+        root_doc,
+        "google-cloud-spanner-admin-instance",
+        "google-cloud-spanner-admin-instance Documentation",
+        author,
+        "google-cloud-spanner-admin-instance",
+        "google-cloud-spanner-admin-instance Library",
+        "APIs",
+    )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+    "google.api_core": (
+        "https://googleapis.dev/python/google-api-core/latest/",
+        None,
+    ),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst
new file mode 100644
index 0000000000..545d74cbb9
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst
@@ -0,0 +1,10 @@
+.. include:: multiprocessing.rst
+
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    spanner_admin_instance_v1/services_
+    spanner_admin_instance_v1/types_
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst
new file mode 100644
index 0000000000..536d17b2ea
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
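+
+As a minimal sketch of this guidance (the project names and pool size below are
+illustrative assumptions), each worker builds its own client after the fork:
+
+.. code-block:: python
+
+    import multiprocessing
+
+    from google.cloud import spanner_admin_instance_v1
+
+    def count_instance_configs(parent: str) -> int:
+        # Create the client *inside* the worker process, i.e. after the
+        # fork, so every process owns its own gRPC channel.
+        client = spanner_admin_instance_v1.InstanceAdminClient()
+        return len(list(client.list_instance_configs(parent=parent)))
+
+    if __name__ == "__main__":
+        parents = ["projects/project-one", "projects/project-two"]
+        with multiprocessing.Pool(processes=2) as pool:
+            print(pool.map(count_instance_configs, parents))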
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst new file mode 100644 index 0000000000..fe820b3fad --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst @@ -0,0 +1,10 @@ +InstanceAdmin +------------------------------- + +.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin + :members: + :inherited-members: + +.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst new file mode 100644 index 0000000000..407d44cc34 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Spanner Admin Instance v1 API +======================================================= +.. toctree:: + :maxdepth: 2 + + instance_admin diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst new file mode 100644 index 0000000000..250cf6bf9b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Spanner Admin Instance v1 API +==================================================== + +.. automodule:: google.cloud.spanner_admin_instance_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py new file mode 100644 index 0000000000..a729ed0a13 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.spanner_admin_instance import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.spanner_admin_instance_v1.services.instance_admin.client import InstanceAdminClient +from google.cloud.spanner_admin_instance_v1.services.instance_admin.async_client import InstanceAdminAsyncClient + +from google.cloud.spanner_admin_instance_v1.types.common import OperationProgress +from google.cloud.spanner_admin_instance_v1.types.common import ReplicaSelection +from google.cloud.spanner_admin_instance_v1.types.common import FulfillmentPeriod +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import AutoscalingConfig +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import FreeInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import Instance +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstanceConfig +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstancePartition +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesResponse +from 
google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceResponse +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaComputeCapacity +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaInfo +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionMetadata +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionRequest +from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceRequest + +__all__ = ('InstanceAdminClient', + 'InstanceAdminAsyncClient', + 'OperationProgress', + 'ReplicaSelection', + 'FulfillmentPeriod', + 'AutoscalingConfig', + 'CreateInstanceConfigMetadata', + 'CreateInstanceConfigRequest', + 'CreateInstanceMetadata', + 'CreateInstancePartitionMetadata', + 'CreateInstancePartitionRequest', + 'CreateInstanceRequest', + 'DeleteInstanceConfigRequest', + 'DeleteInstancePartitionRequest', + 'DeleteInstanceRequest', + 'FreeInstanceMetadata', + 'GetInstanceConfigRequest', + 'GetInstancePartitionRequest', + 'GetInstanceRequest', + 'Instance', + 'InstanceConfig', + 'InstancePartition', + 'ListInstanceConfigOperationsRequest', + 'ListInstanceConfigOperationsResponse', + 'ListInstanceConfigsRequest', + 'ListInstanceConfigsResponse', + 'ListInstancePartitionOperationsRequest', + 'ListInstancePartitionOperationsResponse', + 'ListInstancePartitionsRequest', + 'ListInstancePartitionsResponse', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'MoveInstanceMetadata', + 'MoveInstanceRequest', + 'MoveInstanceResponse', + 'ReplicaComputeCapacity', + 'ReplicaInfo', + 'UpdateInstanceConfigMetadata', + 'UpdateInstanceConfigRequest', + 'UpdateInstanceMetadata', + 'UpdateInstancePartitionMetadata', + 'UpdateInstancePartitionRequest', + 'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed new file mode 100644 index 0000000000..915a8e55e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py new file mode 100644 index 0000000000..384e545d89 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.instance_admin import InstanceAdminClient +from .services.instance_admin import InstanceAdminAsyncClient + +from .types.common import OperationProgress +from .types.common import ReplicaSelection +from .types.common import FulfillmentPeriod +from .types.spanner_instance_admin import AutoscalingConfig +from .types.spanner_instance_admin import CreateInstanceConfigMetadata +from .types.spanner_instance_admin import CreateInstanceConfigRequest +from .types.spanner_instance_admin import CreateInstanceMetadata +from .types.spanner_instance_admin import CreateInstancePartitionMetadata +from .types.spanner_instance_admin import CreateInstancePartitionRequest +from .types.spanner_instance_admin import CreateInstanceRequest +from .types.spanner_instance_admin import DeleteInstanceConfigRequest +from .types.spanner_instance_admin import DeleteInstancePartitionRequest +from .types.spanner_instance_admin import DeleteInstanceRequest +from .types.spanner_instance_admin import FreeInstanceMetadata +from .types.spanner_instance_admin import GetInstanceConfigRequest +from .types.spanner_instance_admin import GetInstancePartitionRequest +from .types.spanner_instance_admin import GetInstanceRequest +from .types.spanner_instance_admin import Instance +from .types.spanner_instance_admin import InstanceConfig +from .types.spanner_instance_admin import InstancePartition +from .types.spanner_instance_admin import ListInstanceConfigOperationsRequest +from .types.spanner_instance_admin import ListInstanceConfigOperationsResponse +from .types.spanner_instance_admin import ListInstanceConfigsRequest +from .types.spanner_instance_admin import ListInstanceConfigsResponse +from .types.spanner_instance_admin import ListInstancePartitionOperationsRequest +from .types.spanner_instance_admin import ListInstancePartitionOperationsResponse +from .types.spanner_instance_admin import 
ListInstancePartitionsRequest +from .types.spanner_instance_admin import ListInstancePartitionsResponse +from .types.spanner_instance_admin import ListInstancesRequest +from .types.spanner_instance_admin import ListInstancesResponse +from .types.spanner_instance_admin import MoveInstanceMetadata +from .types.spanner_instance_admin import MoveInstanceRequest +from .types.spanner_instance_admin import MoveInstanceResponse +from .types.spanner_instance_admin import ReplicaComputeCapacity +from .types.spanner_instance_admin import ReplicaInfo +from .types.spanner_instance_admin import UpdateInstanceConfigMetadata +from .types.spanner_instance_admin import UpdateInstanceConfigRequest +from .types.spanner_instance_admin import UpdateInstanceMetadata +from .types.spanner_instance_admin import UpdateInstancePartitionMetadata +from .types.spanner_instance_admin import UpdateInstancePartitionRequest +from .types.spanner_instance_admin import UpdateInstanceRequest + +__all__ = ( + 'InstanceAdminAsyncClient', +'AutoscalingConfig', +'CreateInstanceConfigMetadata', +'CreateInstanceConfigRequest', +'CreateInstanceMetadata', +'CreateInstancePartitionMetadata', +'CreateInstancePartitionRequest', +'CreateInstanceRequest', +'DeleteInstanceConfigRequest', +'DeleteInstancePartitionRequest', +'DeleteInstanceRequest', +'FreeInstanceMetadata', +'FulfillmentPeriod', +'GetInstanceConfigRequest', +'GetInstancePartitionRequest', +'GetInstanceRequest', +'Instance', +'InstanceAdminClient', +'InstanceConfig', +'InstancePartition', +'ListInstanceConfigOperationsRequest', +'ListInstanceConfigOperationsResponse', +'ListInstanceConfigsRequest', +'ListInstanceConfigsResponse', +'ListInstancePartitionOperationsRequest', +'ListInstancePartitionOperationsResponse', +'ListInstancePartitionsRequest', +'ListInstancePartitionsResponse', +'ListInstancesRequest', +'ListInstancesResponse', +'MoveInstanceMetadata', +'MoveInstanceRequest', +'MoveInstanceResponse', +'OperationProgress', +'ReplicaComputeCapacity', +'ReplicaInfo', +'ReplicaSelection', +'UpdateInstanceConfigMetadata', +'UpdateInstanceConfigRequest', +'UpdateInstanceMetadata', +'UpdateInstancePartitionMetadata', +'UpdateInstancePartitionRequest', +'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json new file mode 100644 index 0000000000..60fa46718a --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json @@ -0,0 +1,343 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.spanner_admin_instance_v1", + "protoPackage": "google.spanner.admin.instance.v1", + "schema": "1.0", + "services": { + "InstanceAdmin": { + "clients": { + "grpc": { + "libraryClient": "InstanceAdminClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateInstanceConfig": { + "methods": [ + "create_instance_config" + ] + }, + "CreateInstancePartition": { + "methods": [ + "create_instance_partition" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteInstanceConfig": { + "methods": [ + "delete_instance_config" + ] + }, + "DeleteInstancePartition": { + "methods": [ + "delete_instance_partition" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { 
+ "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "GetInstancePartition": { + "methods": [ + "get_instance_partition" + ] + }, + "ListInstanceConfigOperations": { + "methods": [ + "list_instance_config_operations" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstancePartitionOperations": { + "methods": [ + "list_instance_partition_operations" + ] + }, + "ListInstancePartitions": { + "methods": [ + "list_instance_partitions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateInstancePartition": { + "methods": [ + "update_instance_partition" + ] + } + } + }, + "grpc-async": { + "libraryClient": "InstanceAdminAsyncClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateInstanceConfig": { + "methods": [ + "create_instance_config" + ] + }, + "CreateInstancePartition": { + "methods": [ + "create_instance_partition" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteInstanceConfig": { + "methods": [ + "delete_instance_config" + ] + }, + "DeleteInstancePartition": { + "methods": [ + "delete_instance_partition" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "GetInstancePartition": { + "methods": [ + "get_instance_partition" + ] + }, + "ListInstanceConfigOperations": { + "methods": [ + "list_instance_config_operations" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstancePartitionOperations": { + "methods": [ + "list_instance_partition_operations" + ] + }, + "ListInstancePartitions": { + "methods": [ + "list_instance_partitions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateInstancePartition": { + "methods": [ + "update_instance_partition" + ] + } + } + }, + "rest": { + "libraryClient": "InstanceAdminClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "CreateInstanceConfig": { + "methods": [ + "create_instance_config" + ] + }, + "CreateInstancePartition": { + "methods": [ + "create_instance_partition" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "DeleteInstanceConfig": { + "methods": [ + "delete_instance_config" + ] + }, + "DeleteInstancePartition": { + "methods": [ + "delete_instance_partition" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "GetInstanceConfig": { + "methods": [ + "get_instance_config" + ] + }, + "GetInstancePartition": { + "methods": [ + "get_instance_partition" + ] + }, + 
"ListInstanceConfigOperations": { + "methods": [ + "list_instance_config_operations" + ] + }, + "ListInstanceConfigs": { + "methods": [ + "list_instance_configs" + ] + }, + "ListInstancePartitionOperations": { + "methods": [ + "list_instance_partition_operations" + ] + }, + "ListInstancePartitions": { + "methods": [ + "list_instance_partitions" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "MoveInstance": { + "methods": [ + "move_instance" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpdateInstanceConfig": { + "methods": [ + "update_instance_config" + ] + }, + "UpdateInstancePartition": { + "methods": [ + "update_instance_partition" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py new file mode 100644 index 0000000000..20a9cd975b --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed new file mode 100644 index 0000000000..915a8e55e3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py new file mode 100644 index 0000000000..cbf94b283c --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py new file mode 100644 index 0000000000..72ef4ab187 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import InstanceAdminClient +from .async_client import InstanceAdminAsyncClient + +__all__ = ( + 'InstanceAdminClient', + 'InstanceAdminAsyncClient', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py new file mode 100644 index 0000000000..de12cfc17e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py @@ -0,0 +1,3468 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import logging as std_logging
+from collections import OrderedDict
+import re
+from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+import uuid
+
+from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version
+
+from google.api_core.client_options import ClientOptions
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.oauth2 import service_account              # type: ignore
+import google.protobuf
+
+
+try:
+    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
+
+from google.api_core import operation  # type: ignore
+from google.api_core import operation_async  # type: ignore
+from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers
+from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport
+from .client import InstanceAdminClient
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+class InstanceAdminAsyncClient:
+    """Cloud Spanner Instance Admin API
+
+    The Cloud Spanner Instance Admin API can be used to create,
+    delete, modify and list instances. Instances are dedicated Cloud
+    Spanner serving and storage resources to be used by Cloud
+    Spanner databases.
+
+    Each instance has a "configuration", which dictates where the
+    serving resources for the Cloud Spanner instance are located
+    (e.g., US-central, Europe). Configurations are created by Google
+    based on resource availability.
+
+    Cloud Spanner billing is based on the instances that exist and
+    their sizes. After an instance exists, there are no additional
+    per-database or per-operation charges for use of the instance
+    (though there may be additional network bandwidth charges).
+    Instances offer isolation: problems with databases in one
+    instance will not affect other instances. However, within an
+    instance databases can affect each other. For example, if one
+    database in an instance receives a lot of requests and consumes
+    most of the instance resources, fewer resources are available
+    for other databases in that instance, and their performance may
+    suffer.
+    """
+
+    _client: InstanceAdminClient
+
+    # Copy defaults from the synchronous client for use here.
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = InstanceAdminClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = InstanceAdminClient._DEFAULT_UNIVERSE
+
+    instance_path = staticmethod(InstanceAdminClient.instance_path)
+    parse_instance_path = staticmethod(InstanceAdminClient.parse_instance_path)
+    instance_config_path = staticmethod(InstanceAdminClient.instance_config_path)
+    parse_instance_config_path = staticmethod(InstanceAdminClient.parse_instance_config_path)
+    instance_partition_path = staticmethod(InstanceAdminClient.instance_partition_path)
+    parse_instance_partition_path = staticmethod(InstanceAdminClient.parse_instance_partition_path)
+    common_billing_account_path = staticmethod(InstanceAdminClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(InstanceAdminClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(InstanceAdminClient.common_folder_path)
+    parse_common_folder_path = staticmethod(InstanceAdminClient.parse_common_folder_path)
+    common_organization_path = staticmethod(InstanceAdminClient.common_organization_path)
+    parse_common_organization_path = staticmethod(InstanceAdminClient.parse_common_organization_path)
+    common_project_path = staticmethod(InstanceAdminClient.common_project_path)
+    parse_common_project_path = staticmethod(InstanceAdminClient.parse_common_project_path)
+    common_location_path = staticmethod(InstanceAdminClient.common_location_path)
+    parse_common_location_path = staticmethod(InstanceAdminClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InstanceAdminAsyncClient: The constructed client.
+        """
+        return InstanceAdminClient.from_service_account_info.__func__(InstanceAdminAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InstanceAdminAsyncClient: The constructed client.
+        """
+        return InstanceAdminClient.from_service_account_file.__func__(InstanceAdminAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return InstanceAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+    @property
+    def transport(self) -> InstanceAdminTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            InstanceAdminTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+                by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = InstanceAdminClient.get_transport_class
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the instance admin async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the InstanceAdminTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = InstanceAdminClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER
+            _LOGGER.debug(
+                "Created client `google.spanner.admin.instance_v1.InstanceAdminAsyncClient`.",
+                extra={
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
+                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
+                } if hasattr(self._client._transport, "_credentials") else {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "credentialsType": None,
+                }
+            )
+
+    async def list_instance_configs(self,
+            request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> pagers.ListInstanceConfigsAsyncPager:
+        r"""Lists the supported instance configurations for a
+        given project.
+        Returns both Google-managed configurations and
+        user-managed configurations.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_list_instance_configs():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_instance_configs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]]):
+                The request object.
The request for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + parent (:class:`str`): + Required. The name of the project for which a list of + supported instance configurations is requested. Values + are of the form ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager: + The response for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest): + request = spanner_instance_admin.ListInstanceConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstanceConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance_config(self, + request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.InstanceConfig: + r"""Gets information about a particular instance + configuration. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_get_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]]): + The request object. The request for + [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. + name (:class:`str`): + Required. The name of the requested instance + configuration. Values are of the form + ``projects//instanceConfigs/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.types.InstanceConfig: + A possible configuration for a Cloud + Spanner instance. Configurations define + the geographic placement of nodes and + their replication. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest): + request = spanner_instance_admin.GetInstanceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
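+        # Illustrative caller-side sketch (the resource name below is
+        # hypothetical, not part of this module):
+        #
+        #     config = await client.get_instance_config(
+        #         name="projects/my-project/instanceConfigs/nam3")
+        #     print(config.display_name)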
+ return response + + async def create_instance_config(self, + request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, + instance_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an instance configuration and begins preparing it to be + used. The returned long-running operation can be used to track + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. + + Immediately after the request returns: + + - The instance configuration is readable via the API, with all + requested attributes. The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field is set to true. Its state is ``CREATING``. + + While the operation is pending: + + - Cancelling the operation renders the instance configuration + immediately unreadable via the API. + - Except for deleting the creating resource, all other attempts + to modify the instance configuration are rejected. + + Upon completion of the returned operation: + + - Instances can be created using the instance configuration. + - The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field becomes false. Its state becomes ``READY``. + + The returned long-running operation will have a name of the + format ``/operations/`` and + can be used to track creation of the instance configuration. The + metadata field type is + [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. + The response field type is + [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], + if successful. + + Authorization requires ``spanner.instanceConfigs.create`` + permission on the resource + [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_create_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.CreateInstanceConfigRequest( + parent="parent_value", + instance_config_id="instance_config_id_value", + ) + + # Make the request + operation = client.create_instance_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]]): + The request object. The request for + [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. 
+ parent (:class:`str`): + Required. The name of the project in which to create the + instance configuration. Values are of the form + ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`): + Required. The ``InstanceConfig`` proto of the + configuration to create. ``instance_config.name`` must + be ``/instanceConfigs/``. + ``instance_config.base_config`` must be a Google-managed + configuration name, e.g. /instanceConfigs/us-east1, + /instanceConfigs/nam3. + + This corresponds to the ``instance_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_config_id (:class:`str`): + Required. The ID of the instance configuration to + create. Valid identifiers are of the form + ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and + 64 characters in length. The ``custom-`` prefix is + required to avoid name conflicts with Google-managed + configurations. + + This corresponds to the ``instance_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations + define the geographic placement of nodes and their + replication. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, instance_config, instance_config_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest): + request = spanner_instance_admin.CreateInstanceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_config is not None: + request.instance_config = instance_config + if instance_config_id is not None: + request.instance_config_id = instance_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. 
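+        # For illustration (value hypothetical): a request whose parent is
+        # "projects/my-project" is sent with a gRPC metadata entry of the form
+        # ("x-goog-request-params", "parent=projects%2Fmy-project"), which the
+        # backend uses to route the call; the value is URL-encoded.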
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.InstanceConfig, + metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata, + ) + + # Done; return the response. + return response + + async def update_instance_config(self, + request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None, + *, + instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an instance configuration. The returned long-running + operation can be used to track the progress of updating the + instance. If the named instance configuration does not exist, + returns ``NOT_FOUND``. + + Only user-managed configurations can be updated. + + Immediately after the request returns: + + - The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field is set to true. + + While the operation is pending: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. + The operation is guaranteed to succeed at undoing all changes, + after which point it terminates with a ``CANCELLED`` status. + - All other attempts to modify the instance configuration are + rejected. + - Reading the instance configuration via the API continues to + give the pre-request values. + + Upon completion of the returned operation: + + - Creating instances using the instance configuration uses the + new values. + - The new values of the instance configuration are readable via + the API. + - The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field becomes false. + + The returned long-running operation will have a name of the + format ``/operations/`` and + can be used to track the instance configuration modification. + The metadata field type is + [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. + The response field type is + [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], + if successful. + + Authorization requires ``spanner.instanceConfigs.update`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstanceConfig.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_update_instance_config():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
+                )
+
+                # Make the request
+                operation = client.update_instance_config(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]]):
+                The request object. The request for
+                [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
+            instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`):
+                Required. The user instance configuration to update,
+                which must always include the instance configuration
+                name. Otherwise, only fields mentioned in
+                [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
+                need be included. To prevent conflicts of concurrent
+                updates,
+                [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
+                can be used.
+
+                This corresponds to the ``instance_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required. A mask specifying which fields in
+                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+                should be updated. The field mask must always be
+                specified; this prevents any future fields in
+                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+                from being erased accidentally by clients that do not
+                know about them. Only display_name and labels can be
+                updated.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
+                define the geographic placement of nodes and their
+                replication.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
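+        # E.g. (hypothetical) update_instance_config(request=req, update_mask=mask)
+        # mixes the two calling styles and raises ValueError below: pass either
+        # a full request object or flattened fields, never both.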
+ flattened_params = [instance_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest): + request = spanner_instance_admin.UpdateInstanceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance_config is not None: + request.instance_config = instance_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance_config.name", request.instance_config.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.InstanceConfig, + metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance_config(self, + request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the instance configuration. Deletion is only allowed + when no instances are using the configuration. If any instances + are using the configuration, returns ``FAILED_PRECONDITION``. + + Only user-managed configurations can be deleted. + + Authorization requires ``spanner.instanceConfigs.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstanceConfig.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_delete_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_config(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]]): + The request object. 
The request for + [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. + name (:class:`str`): + Required. The name of the instance configuration to be + deleted. Values are of the form + ``projects//instanceConfigs/`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest): + request = spanner_instance_admin.DeleteInstanceConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_instance_config_operations(self, + request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstanceConfigOperationsAsyncPager: + r"""Lists the user-managed instance configuration long-running + operations in the given project. An instance configuration + operation has a name of the form + ``projects//instanceConfigs//operations/``. + The long-running operation metadata field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.start_time`` in descending order + starting from the most recently started operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_list_instance_config_operations(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_config_operations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]]): + The request object. The request for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + parent (:class:`str`): + Required. The project of the instance configuration + operations. Values are of the form + ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager: + The response for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest): + request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_config_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
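+        # A mismatch between the credentials' universe and the client's
+        # configured universe_domain (default "googleapis.com") raises here,
+        # before any RPC is sent.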
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstanceConfigOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_instances(self, + request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists all instances in the given project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_list_instances(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]]): + The request object. The request for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + parent (:class:`str`): + Required. The name of the project for which a list of + instances is requested. Values are of the form + ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager: + The response for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstancesRequest): + request = spanner_instance_admin.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_instance_partitions(self, + request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancePartitionsAsyncPager: + r"""Lists all instance partitions for the given instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_list_instance_partitions(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstancePartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_partitions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]]): + The request object. The request for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. + parent (:class:`str`): + Required. The instance whose instance partitions should + be listed. Values are of the form + ``projects//instances/``. Use + ``{instance} = '-'`` to list instance partitions for all + Instances in a project, e.g., + ``projects/myproject/instances/-``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager: + The response for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest): + request = spanner_instance_admin.ListInstancePartitionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancePartitionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance(self, + request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.Instance: + r"""Gets information about a particular instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_get_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]]): + The request object. The request for + [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. + name (:class:`str`): + Required. The name of the requested instance. Values are + of the form ``projects//instances/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.types.Instance: + An isolated set of Cloud Spanner + resources on which databases can be + hosted. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.GetInstanceRequest): + request = spanner_instance_admin.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_instance(self, + request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[spanner_instance_admin.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an instance and begins preparing it to begin serving. + The returned long-running operation can be used to track the + progress of preparing the new instance. The instance name is + assigned by the caller. If the named instance already exists, + ``CreateInstance`` returns ``ALREADY_EXISTS``. + + Immediately upon completion of this request: + + - The instance is readable via the API, with all requested + attributes but no allocated resources. Its state is + ``CREATING``. + + Until completion of the returned operation: + + - Cancelling the operation renders the instance immediately + unreadable via the API. + - The instance can be deleted. + - All other attempts to modify the instance are rejected. + + Upon completion of the returned operation: + + - Billing for all successfully-allocated resources begins (some + types may have lower than the requested levels). + - Databases can be created in the instance. + - The instance's allocated resource levels are readable via the + API. + - The instance's state becomes ``READY``. + + The returned long-running operation will have a name of the + format ``/operations/`` and can be + used to track creation of the instance. The metadata field type + is + [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. + The response field type is + [Instance][google.spanner.admin.instance.v1.Instance], if + successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_create_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + instance = spanner_admin_instance_v1.Instance() + instance.name = "name_value" + instance.config = "config_value" + instance.display_name = "display_name_value" + + request = spanner_admin_instance_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]]): + The request object. The request for + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + parent (:class:`str`): + Required. The name of the project in which to create the + instance. Values are of the form ``projects/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_id (:class:`str`): + Required. The ID of the instance to create. Valid + identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` + and must be between 2 and 64 characters in length. + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): + Required. The instance to create. The name may be + omitted, but if specified must be + ``/instances/``. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, instance_id, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstanceRequest): + request = spanner_instance_admin.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.Instance, + metadata_type=spanner_instance_admin.CreateInstanceMetadata, + ) + + # Done; return the response. 
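+        # Caller-side sketch (hypothetical): awaiting the returned operation's
+        # result, as in the sample above, yields the created Instance once the
+        # instance reaches the READY state.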
+ return response
+
+ async def update_instance(self,
+ request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None,
+ *,
+ instance: Optional[spanner_instance_admin.Instance] = None,
+ field_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Updates an instance, and begins allocating or releasing
+ resources as requested. The returned long-running operation can
+ be used to track the progress of updating the instance. If the
+ named instance does not exist, returns ``NOT_FOUND``.
+
+ Immediately upon completion of this request:
+
+ - For resource types for which a decrease in the instance's
+ allocation has been requested, billing is based on the
+ newly-requested level.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance are rejected.
+ - Reading the instance via the API continues to give the
+ pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance's tables.
+ - The instance's new resource levels are readable via the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track the instance modification. The metadata field type
+ is
+ [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ Authorization requires ``spanner.instances.update`` permission
+ on the resource
+ [name][google.spanner.admin.instance.v1.Instance.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_update_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ instance = spanner_admin_instance_v1.Instance()
+ instance.name = "name_value"
+ instance.config = "config_value"
+ instance.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.UpdateInstanceRequest(
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.update_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]]):
+ The request object.
The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): + Required. The instance to update, which must always + include the instance name. Otherwise, only fields + mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] + need be included. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A mask specifying which fields in + [Instance][google.spanner.admin.instance.v1.Instance] + should be updated. The field mask must always be + specified; this prevents any future fields in + [Instance][google.spanner.admin.instance.v1.Instance] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.spanner_admin_instance_v1.types.Instance` + An isolated set of Cloud Spanner resources on which + databases can be hosted. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [instance, field_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): + request = spanner_instance_admin.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance is not None: + request.instance = instance + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
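+ # (Editorial note, not generated code) `operation_async.from_gapic` below
+ # wraps the raw google.longrunning Operation in an AsyncOperation whose
+ # `metadata` deserializes to UpdateInstanceMetadata and whose `result()`
+ # deserializes to Instance. A hypothetical caller-side sketch of building
+ # the required field mask:
+ #
+ #     from google.protobuf import field_mask_pb2
+ #     mask = field_mask_pb2.FieldMask(paths=["display_name", "node_count"])
+ #     operation = await client.update_instance(instance=instance, field_mask=mask)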
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ spanner_instance_admin.Instance,
+ metadata_type=spanner_instance_admin.UpdateInstanceMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_instance(self,
+ request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> None:
+ r"""Deletes an instance.
+
+ Immediately upon completion of the request:
+
+ - Billing ceases for all of the instance's reserved resources.
+
+ Soon afterward:
+
+ - The instance and *all of its databases* immediately and
+ irrevocably disappear from the API. All data in the databases
+ is permanently deleted.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_delete_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.DeleteInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ await client.delete_instance(request=request)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]]):
+ The request object. The request for
+ [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].
+ name (:class:`str`):
+ Required. The name of the instance to be deleted. Values
+ are of the form
+ ``projects/<project>/instances/<instance>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest):
+ request = spanner_instance_admin.DeleteInstanceRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
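+ # (Editorial note, not generated code) Per the flattened-argument
+ # convention enforced above, exactly one calling style may be used;
+ # illustrative examples:
+ #
+ #     await client.delete_instance(name="projects/p/instances/i")               # ok
+ #     await client.delete_instance(request={"name": "projects/p/instances/i"})  # ok
+ #     await client.delete_instance(request={"name": "..."}, name="...")         # ValueError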
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on an instance resource. Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. 
+
+ For some types of Google Cloud resources, a binding
+ can also specify a condition, which is a logical
+ expression that allows access to a resource only if
+ the expression evaluates to true. A condition can add
+ constraints based on attributes of the request, the
+ resource, or both. To learn which resources support
+ conditions in their IAM policies, see the [IAM
+ documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+ **JSON example:**
+
+ ::
+
+     {
+       "bindings": [
+         {
+           "role": "roles/resourcemanager.organizationAdmin",
+           "members": [
+             "user:mike@example.com",
+             "group:admins@example.com",
+             "domain:google.com",
+             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+           ]
+         },
+         {
+           "role": "roles/resourcemanager.organizationViewer",
+           "members": [ "user:eve@example.com" ],
+           "condition": {
+             "title": "expirable access",
+             "description": "Does not grant access after Sep 2020",
+             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')",
+           }
+         }
+       ],
+       "etag": "BwWWja0YfJA=",
+       "version": 3
+     }
+
+ **YAML example:**
+
+ ::
+
+     bindings:
+     - members:
+       - user:mike@example.com
+       - group:admins@example.com
+       - domain:google.com
+       - serviceAccount:my-project-id@appspot.gserviceaccount.com
+       role: roles/resourcemanager.organizationAdmin
+     - members:
+       - user:eve@example.com
+       role: roles/resourcemanager.organizationViewer
+       condition:
+         title: expirable access
+         description: Does not grant access after Sep 2020
+         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+     etag: BwWWja0YfJA=
+     version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.SetIamPolicyRequest(resource=resource)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_iam_policy(self,
+ request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the access control policy for an instance resource. Returns
+ an empty policy if an instance exists but does not have a policy
+ set.
+ + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+ **JSON example:**
+
+ ::
+
+     {
+       "bindings": [
+         {
+           "role": "roles/resourcemanager.organizationAdmin",
+           "members": [
+             "user:mike@example.com",
+             "group:admins@example.com",
+             "domain:google.com",
+             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+           ]
+         },
+         {
+           "role": "roles/resourcemanager.organizationViewer",
+           "members": [ "user:eve@example.com" ],
+           "condition": {
+             "title": "expirable access",
+             "description": "Does not grant access after Sep 2020",
+             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')",
+           }
+         }
+       ],
+       "etag": "BwWWja0YfJA=",
+       "version": 3
+     }
+
+ **YAML example:**
+
+ ::
+
+     bindings:
+     - members:
+       - user:mike@example.com
+       - group:admins@example.com
+       - domain:google.com
+       - serviceAccount:my-project-id@appspot.gserviceaccount.com
+       role: roles/resourcemanager.organizationAdmin
+     - members:
+       - user:eve@example.com
+       role: roles/resourcemanager.organizationViewer
+       condition:
+         title: expirable access
+         description: Does not grant access after Sep 2020
+         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+     etag: BwWWja0YfJA=
+     version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def test_iam_permissions(self,
+ request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ permissions: Optional[MutableSequence[str]] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Returns permissions that the caller has on the specified
+ instance resource.
+
+ Attempting this RPC on a non-existent Cloud Spanner instance
+ resource will result in a NOT_FOUND error if the user has
+ ``spanner.instances.list`` permission on the containing Google
+ Cloud Project. Otherwise returns an empty set of permissions.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+ from google.iam.v1 import iam_policy_pb2 # type: ignore
+
+ async def sample_test_iam_permissions():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = iam_policy_pb2.TestIamPermissionsRequest(
+ resource="resource_value",
+ permissions=['permissions_value1', 'permissions_value2'],
+ )
+
+ # Make the request
+ response = await client.test_iam_permissions(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]):
+ The request object. Request message for ``TestIamPermissions`` method.
+ resource (:class:`str`):
+ REQUIRED: The resource for which the
+ policy detail is being requested. See
+ the operation documentation for the
+ appropriate value for this field.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (:class:`MutableSequence[str]`):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource, permissions]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+ elif not request:
+ request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
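+ # (Editorial note, not generated code) The routing header appended above
+ # travels as an `x-goog-request-params` gRPC metadata entry with its value
+ # URL-encoded, roughly:
+ #
+ #     ("x-goog-request-params", "resource=projects%2Fp%2Finstances%2Fi")
+ #
+ # which lets the backend route the call without parsing the request body.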
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance_partition(self, + request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.InstancePartition: + r"""Gets information about a particular instance + partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_get_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstancePartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]]): + The request object. The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + name (:class:`str`): + Required. The name of the requested instance partition. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.types.InstancePartition: + An isolated set of Cloud Spanner + resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
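+ # (Editorial note, not generated code) GetInstancePartitionRequest is a
+ # proto-plus type, so the coercion below accepts either a dict or another
+ # request instance; an illustrative equivalent:
+ #
+ #     spanner_instance_admin.GetInstancePartitionRequest(
+ #         {"name": "projects/p/instances/i/instancePartitions/part"})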
+ if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest):
+ request = spanner_instance_admin.GetInstancePartitionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_partition]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def create_instance_partition(self,
+ request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ instance_partition: Optional[spanner_instance_admin.InstancePartition] = None,
+ instance_partition_id: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Creates an instance partition and begins preparing it to be
+ used. The returned long-running operation can be used to track
+ the progress of preparing the new instance partition. The
+ instance partition name is assigned by the caller. If the named
+ instance partition already exists, ``CreateInstancePartition``
+ returns ``ALREADY_EXISTS``.
+
+ Immediately upon completion of this request:
+
+ - The instance partition is readable via the API, with all
+ requested attributes but no allocated resources. Its state is
+ ``CREATING``.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation renders the instance partition
+ immediately unreadable via the API.
+ - The instance partition can be deleted.
+ - All other attempts to modify the instance partition are
+ rejected.
+
+ Upon completion of the returned operation:
+
+ - Billing for all successfully-allocated resources begins (some
+ types may have lower than the requested levels).
+ - Databases can start using this instance partition.
+ - The instance partition's allocated resource levels are
+ readable via the API.
+ - The instance partition's state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_partition_name>/operations/<operation_id>``
+ and can be used to track creation of the instance partition. The
+ metadata field type is
+ [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata].
+ The response field type is
+ [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+ if successful.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_create_instance_partition():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ instance_partition = spanner_admin_instance_v1.InstancePartition()
+ instance_partition.node_count = 1070
+ instance_partition.name = "name_value"
+ instance_partition.config = "config_value"
+ instance_partition.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.CreateInstancePartitionRequest(
+ parent="parent_value",
+ instance_partition_id="instance_partition_id_value",
+ instance_partition=instance_partition,
+ )
+
+ # Make the request
+ operation = client.create_instance_partition(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]]):
+ The request object. The request for
+ [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition].
+ parent (:class:`str`):
+ Required. The name of the instance in which to create
+ the instance partition. Values are of the form
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`):
+ Required. The instance partition to create. The
+ instance_partition.name may be omitted, but if specified
+ must be
+ ``<parent>/instancePartitions/<instance_partition_id>``.
+
+ This corresponds to the ``instance_partition`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_partition_id (:class:`str`):
+ Required. The ID of the instance partition to create.
+ Valid identifiers are of the form
+ ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64
+ characters in length.
+
+ This corresponds to the ``instance_partition_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define
+ placements on.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
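+ # (Editorial note, not generated code) A minimal flattened-argument sketch
+ # (the docstring sample above shows the request-object style instead);
+ # resource names are illustrative:
+ #
+ #     operation = await client.create_instance_partition(
+ #         parent="projects/p/instances/i",
+ #         instance_partition_id="my-partition",
+ #         instance_partition=spanner_admin_instance_v1.InstancePartition(
+ #             config="projects/p/instanceConfigs/regional-us-central1",
+ #             display_name="My partition",
+ #             node_count=1,
+ #         ),
+ #     )
+ #     partition = await operation.result()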
+ flattened_params = [parent, instance_partition, instance_partition_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): + request = spanner_instance_admin.CreateInstancePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_partition is not None: + request.instance_partition = instance_partition + if instance_partition_id is not None: + request.instance_partition_id = instance_partition_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.InstancePartition, + metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance_partition(self, + request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an existing instance partition. Requires that the + instance partition is not used by any database or backup and is + not the default instance partition of an instance. + + Authorization requires ``spanner.instancePartitions.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + async def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_partition(request=request) + + Args: + request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]]): + The request object. 
The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + name (:class:`str`): + Required. The name of the instance partition to be + deleted. Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): + request = spanner_instance_admin.DeleteInstancePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_instance_partition(self, + request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, + *, + instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, + field_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an instance partition, and begins allocating or + releasing resources as requested. The returned long-running + operation can be used to track the progress of updating the + instance partition. If the named instance partition does not + exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance + partition's allocation has been requested, billing is based on + the newly-requested level. 
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance partition are
+ rejected.
+ - Reading the instance partition via the API continues to give
+ the pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance partition's tables.
+ - The instance partition's new resource levels are readable via
+ the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_partition_name>/operations/<operation_id>``
+ and can be used to track the instance partition modification.
+ The metadata field type is
+ [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata].
+ The response field type is
+ [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+ if successful.
+
+ Authorization requires ``spanner.instancePartitions.update``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_update_instance_partition():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ instance_partition = spanner_admin_instance_v1.InstancePartition()
+ instance_partition.node_count = 1070
+ instance_partition.name = "name_value"
+ instance_partition.config = "config_value"
+ instance_partition.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
+ instance_partition=instance_partition,
+ )
+
+ # Make the request
+ operation = client.update_instance_partition(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = (await operation).result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]]):
+ The request object. The request for
+ [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition].
+ instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`):
+ Required. The instance partition to update, which must
+ always include the instance partition name. Otherwise,
+ only fields mentioned in
+ [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask]
+ need be included.
+
+ This corresponds to the ``instance_partition`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+ Required.
A mask specifying which fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + should be updated. The field mask must always be + specified; this prevents any future fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [instance_partition, field_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest): + request = spanner_instance_admin.UpdateInstancePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance_partition is not None: + request.instance_partition = instance_partition + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance_partition.name", request.instance_partition.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.InstancePartition, + metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata, + ) + + # Done; return the response. 
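+ # (Editorial note, not generated code) On the returned future, `metadata`
+ # carries UpdateInstancePartitionMetadata (including cancel_time) while
+ # `result()` resolves to the updated InstancePartition; illustrative use:
+ #
+ #     partition = await operation.result()
+ #     progress = operation.metadata  # UpdateInstancePartitionMetadata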
+ return response
+
+ async def list_instance_partition_operations(self,
+ request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstancePartitionOperationsAsyncPager:
+ r"""Lists instance partition long-running operations in the given
+ instance. An instance partition operation has a name of the form
+ ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``.
+ The long-running operation metadata field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.start_time`` in descending order
+ starting from the most recently started operation.
+
+ Authorization requires
+ ``spanner.instancePartitionOperations.list`` permission on the
+ resource
+ [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ async def sample_list_instance_partition_operations():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instance_partition_operations(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]]):
+ The request object. The request for
+ [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+ parent (:class:`str`):
+ Required. The parent instance of the instance partition
+ operations. Values are of the form
+ ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager:
+ The response for
+ [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
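+
+ A hedged usage sketch (editorial addition; resource names are
+ illustrative):
+
+ .. code-block:: python
+
+     pager = await client.list_instance_partition_operations(
+         parent="projects/p/instances/i")
+     async for op in pager:
+         print(op.name)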
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest): + request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partition_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancePartitionOperationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def move_instance(self, + request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Moves an instance to the target instance configuration. You can + use the returned long-running operation to track the progress of + moving the instance. + + ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance + meets any of the following criteria: + + - Is undergoing a move to a different instance configuration + - Has backups + - Has an ongoing update + - Contains any CMEK-enabled databases + - Is a free trial instance + + While the operation is pending: + + - All other attempts to modify the instance, including changes + to its compute capacity, are rejected. + + - The following database and backup admin operations are + rejected: + + - ``DatabaseAdmin.CreateDatabase`` + - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if + default_leader is specified in the request.) + - ``DatabaseAdmin.RestoreDatabase`` + - ``DatabaseAdmin.CreateBackup`` + - ``DatabaseAdmin.CopyBackup`` + + - Both the source and target instance configurations are subject + to hourly compute and storage charges. + + - The instance might experience higher read-write latencies and + a higher transaction abort rate. However, moving an instance + doesn't cause any downtime. + + The returned long-running operation has a name of the format + ``/operations/`` and can be used to + track the move instance operation. 
The metadata field type is
+        [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata].
+        The response field type is
+        [Instance][google.spanner.admin.instance.v1.Instance], if
+        successful. Cancelling the operation sets its metadata's
+        [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
+        Cancellation is not immediate because it involves moving any
+        data previously moved to the target instance configuration back
+        to the original instance configuration. You can use this
+        operation to track the progress of the cancellation. Upon
+        successful completion of the cancellation, the operation
+        terminates with ``CANCELLED`` status.
+
+        If not cancelled, upon completion of the returned operation:
+
+        -  The instance successfully moves to the target instance
+           configuration.
+        -  You are billed for compute and storage in target instance
+           configuration.
+
+        Authorization requires the ``spanner.instances.update``
+        permission on the resource
+        [instance][google.spanner.admin.instance.v1.Instance].
+
+        For more details, see `Move an
+        instance <https://cloud.google.com/spanner/docs/move-instance>`__.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            async def sample_move_instance():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.MoveInstanceRequest(
+                    name="name_value",
+                    target_config="target_config_value",
+                )
+
+                # Make the request
+                operation = client.move_instance(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = (await operation).result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]]):
+                The request object. The request for
+                [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation_async.AsyncOperation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for
+                [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.MoveInstanceRequest):
+            request = spanner_instance_admin.MoveInstanceRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
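+        # A hedged sketch of what the lookup below resolves to: the transport
+        # pre-wraps each RPC with google.api_core.gapic_v1 method wrapping, so
+        # the callable fetched here behaves roughly like
+        #
+        #   rpc = gapic_v1.method_async.wrap_method(
+        #       self._client._transport.move_instance,
+        #       default_timeout=None,
+        #       client_info=DEFAULT_CLIENT_INFO,
+        #   )
+        #
+        # i.e. a coroutine function with the service's default retry/timeout
+        # policy already attached.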
+ rpc = self._client._transport._wrapped_methods[self._client._transport.move_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + spanner_instance_admin.MoveInstanceResponse, + metadata_type=spanner_instance_admin.MoveInstanceMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. 
If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "InstanceAdminAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ( + "InstanceAdminAsyncClient", +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py new file mode 100644 index 0000000000..6f504cdc37 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py @@ -0,0 +1,3849 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import uuid +import warnings + +from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import InstanceAdminGrpcTransport +from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport +from .transports.rest import InstanceAdminRestTransport + + +class InstanceAdminClientMeta(type): + """Metaclass for the InstanceAdmin client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] + _transport_registry["grpc"] = InstanceAdminGrpcTransport + _transport_registry["grpc_asyncio"] = InstanceAdminGrpcAsyncIOTransport + _transport_registry["rest"] = InstanceAdminRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[InstanceAdminTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
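+        # Illustrative only (assumes the registry order defined above, where
+        # "grpc" is registered first and is therefore the default):
+        #
+        #   InstanceAdminClient.get_transport_class()        # -> gRPC transport
+        #   InstanceAdminClient.get_transport_class("rest")  # -> REST transport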
+        return next(iter(cls._transport_registry.values()))
+
+
+class InstanceAdminClient(metaclass=InstanceAdminClientMeta):
+    """Cloud Spanner Instance Admin API
+
+    The Cloud Spanner Instance Admin API can be used to create,
+    delete, modify and list instances. Instances are dedicated Cloud
+    Spanner serving and storage resources to be used by Cloud
+    Spanner databases.
+
+    Each instance has a "configuration", which dictates where the
+    serving resources for the Cloud Spanner instance are located
+    (e.g., US-central, Europe). Configurations are created by Google
+    based on resource availability.
+
+    Cloud Spanner billing is based on the instances that exist and
+    their sizes. After an instance exists, there are no additional
+    per-database or per-operation charges for use of the instance
+    (though there may be additional network bandwidth charges).
+    Instances offer isolation: problems with databases in one
+    instance will not affect other instances. However, within an
+    instance databases can affect each other. For example, if one
+    database in an instance receives a lot of requests and consumes
+    most of the instance resources, fewer resources are available
+    for other databases in that instance, and their performance may
+    suffer.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "spanner.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InstanceAdminClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            InstanceAdminClient: The constructed client.
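+
+        A minimal usage sketch (the key file path here is hypothetical)::
+
+            client = InstanceAdminClient.from_service_account_file(
+                "service-account.json")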
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceAdminTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceAdminTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def instance_path(project: str,instance: str,) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str,str]: + """Parses a instance path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def instance_config_path(project: str,instance_config: str,) -> str: + """Returns a fully-qualified instance_config string.""" + return "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) + + @staticmethod + def parse_instance_config_path(path: str) -> Dict[str,str]: + """Parses a instance_config path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instanceConfigs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str: + """Returns a fully-qualified instance_partition string.""" + return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) + + @staticmethod + def parse_instance_partition_path(path: str) -> Dict[str,str]: + """Parses a instance_partition path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/instancePartitions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def 
parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
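+        # Illustrative outcomes of the checks below, using this service's
+        # default endpoints:
+        #
+        #   api_endpoint override provided      -> returned unchanged
+        #   use_mtls_endpoint == "always"       -> "spanner.mtls.googleapis.com"
+        #   "auto" and a client cert is in use  -> "spanner.mtls.googleapis.com"
+        #   otherwise                           -> "spanner.googleapis.com"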
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = InstanceAdminClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. 
+ + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = InstanceAdminClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance admin client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstanceAdminTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
+
+        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
+
+        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = InstanceAdminClient._read_environment_variables()
+        self._client_cert_source = InstanceAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
+        self._universe_domain = InstanceAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
+            client_logging.initialize_logging()
+
+        api_key_value = getattr(self._client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError("client_options.api_key and credentials are mutually exclusive")
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        transport_provided = isinstance(transport, InstanceAdminTransport)
+        if transport_provided:
+            # transport is an InstanceAdminTransport instance.
+            if credentials or self._client_options.credentials_file or api_key_value:
+                raise ValueError("When providing a transport instance, "
+                                 "provide its credentials directly.")
+            if self._client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+ ) + self._transport = cast(InstanceAdminTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + InstanceAdminClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[InstanceAdminTransport], Callable[..., InstanceAdminTransport]] = ( + InstanceAdminClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstanceAdminTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.spanner.admin.instance_v1.InstanceAdminClient`.", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "credentialsType": None, + } + ) + + def list_instance_configs(self, + request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstanceConfigsPager: + r"""Lists the supported instance configurations for a + given project. + Returns both Google-managed configurations and + user-managed configurations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            def sample_list_instance_configs():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = client.list_instance_configs(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]):
+                The request object. The request for
+                [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+            parent (str):
+                Required. The name of the project for which a list of
+                supported instance configurations is requested. Values
+                are of the form ``projects/<project>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager:
+                The response for
+                [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [parent]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest):
+            request = spanner_instance_admin.ListInstanceConfigsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_instance_configs]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
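+        # A hedged consumption sketch ("projects/p" is a placeholder parent):
+        # the pager fetches pages lazily and can be consumed item by item or
+        # page by page, e.g.
+        #
+        #   for config in client.list_instance_configs(parent="projects/p"):
+        #       print(config.name)
+        #
+        #   for page in client.list_instance_configs(parent="projects/p").pages:
+        #       ...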
+        response = pagers.ListInstanceConfigsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def get_instance_config(self,
+            request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> spanner_instance_admin.InstanceConfig:
+        r"""Gets information about a particular instance
+        configuration.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            def sample_get_instance_config():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.GetInstanceConfigRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                response = client.get_instance_config(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]):
+                The request object. The request for
+                [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].
+            name (str):
+                Required. The name of the requested instance
+                configuration. Values are of the form
+                ``projects/<project>/instanceConfigs/<config>``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.spanner_admin_instance_v1.types.InstanceConfig:
+                A possible configuration for a Cloud
+                Spanner instance. Configurations define
+                the geographic placement of nodes and
+                their replication.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest):
+            request = spanner_instance_admin.GetInstanceConfigRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_instance_config]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_instance_config(self,
+            request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            instance_config: Optional[spanner_instance_admin.InstanceConfig] = None,
+            instance_config_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Creates an instance configuration and begins preparing it to be
+        used. The returned long-running operation can be used to track
+        the progress of preparing the new instance configuration. The
+        instance configuration name is assigned by the caller. If the
+        named instance configuration already exists,
+        ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``.
+
+        Immediately after the request returns:
+
+        -  The instance configuration is readable via the API, with all
+           requested attributes. The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field is set to true. Its state is ``CREATING``.
+
+        While the operation is pending:
+
+        -  Cancelling the operation renders the instance configuration
+           immediately unreadable via the API.
+        -  Except for deleting the creating resource, all other attempts
+           to modify the instance configuration are rejected.
+
+        Upon completion of the returned operation:
+
+        -  Instances can be created using the instance configuration.
+        -  The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field becomes false. Its state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track creation of the instance configuration. The
+        metadata field type is
+        [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.create``
+        permission on the resource
+        [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import spanner_admin_instance_v1
+
+            def sample_create_instance_config():
+                # Create a client
+                client = spanner_admin_instance_v1.InstanceAdminClient()
+
+                # Initialize request argument(s)
+                request = spanner_admin_instance_v1.CreateInstanceConfigRequest(
+                    parent="parent_value",
+                    instance_config_id="instance_config_id_value",
+                )
+
+                # Make the request
+                operation = client.create_instance_config(request=request)
+
+                print("Waiting for operation to complete...")
+
+                response = operation.result()
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]):
+                The request object. The request for
+                [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
+            parent (str):
+                Required. The name of the project in which to create the
+                instance configuration. Values are of the form
+                ``projects/<project>``.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+                Required. The ``InstanceConfig`` proto of the
+                configuration to create. ``instance_config.name`` must be
+                ``<parent>/instanceConfigs/<instance_config_id>``.
+                ``instance_config.base_config`` must be a Google-managed
+                configuration name, e.g. <parent>/instanceConfigs/us-east1,
+                <parent>/instanceConfigs/nam3.
+
+                This corresponds to the ``instance_config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_config_id (str):
+                Required. The ID of the instance configuration to
+                create. Valid identifiers are of the form
+                ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and
+                64 characters in length. The ``custom-`` prefix is
+                required to avoid name conflicts with Google-managed
+                configurations.
+
+                This corresponds to the ``instance_config_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.api_core.operation.Operation:
+                An object representing a long-running operation.
+
+                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
+                define the geographic placement of nodes and their
+                replication.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
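+        # Illustrative calling conventions ("projects/p" and the ids are
+        # placeholders); the check below enforces that they are mutually
+        # exclusive:
+        #
+        #   client.create_instance_config(
+        #       request={"parent": "projects/p",
+        #                "instance_config_id": "custom-cfg"})
+        #
+        #   client.create_instance_config(
+        #       parent="projects/p", instance_config=cfg,
+        #       instance_config_id="custom-cfg")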
+ flattened_params = [parent, instance_config, instance_config_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest): + request = spanner_instance_admin.CreateInstanceConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_config is not None: + request.instance_config = instance_config + if instance_config_id is not None: + request.instance_config_id = instance_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.InstanceConfig, + metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata, + ) + + # Done; return the response. + return response + + def update_instance_config(self, + request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None, + *, + instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an instance configuration. The returned long-running + operation can be used to track the progress of updating the + instance. If the named instance configuration does not exist, + returns ``NOT_FOUND``. + + Only user-managed configurations can be updated. + + Immediately after the request returns: + + - The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field is set to true. + + While the operation is pending: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. + The operation is guaranteed to succeed at undoing all changes, + after which point it terminates with a ``CANCELLED`` status. + - All other attempts to modify the instance configuration are + rejected. + - Reading the instance configuration via the API continues to + give the pre-request values. + + Upon completion of the returned operation: + + - Creating instances using the instance configuration uses the + new values. + - The new values of the instance configuration are readable via + the API. + - The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field becomes false. 
+
+ The returned long-running operation will have a name of the
+ format ``<instance_config_name>/operations/<operation_id>`` and
+ can be used to track the instance configuration modification.
+ The metadata field type is
+ [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
+ The response field type is
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+ if successful.
+
+ Authorization requires ``spanner.instanceConfigs.update``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_update_instance_config():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
+ )
+
+ # Make the request
+ operation = client.update_instance_config(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]):
+ The request object. The request for
+ [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
+ instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+ Required. The user instance configuration to update,
+ which must always include the instance configuration
+ name. Otherwise, only fields mentioned in
+ [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
+ need be included. To prevent conflicts of concurrent
+ updates,
+ [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
+ can be used.
+
+ This corresponds to the ``instance_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Required. A mask specifying which fields in
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+ should be updated. The field mask must always be
+ specified; this prevents any future fields in
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+ from being erased accidentally by clients that do not
+ know about them. Only display_name and labels can be
+ updated.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`,
+ a possible configuration for a Cloud Spanner instance. Configurations
+ define the geographic placement of nodes and their
+ replication.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [instance_config, update_mask]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest):
+ request = spanner_instance_admin.UpdateInstanceConfigRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if instance_config is not None:
+ request.instance_config = instance_config
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_instance_config]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("instance_config.name", request.instance_config.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ spanner_instance_admin.InstanceConfig,
+ metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_instance_config(self,
+ request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> None:
+ r"""Deletes the instance configuration. Deletion is only allowed
+ when no instances are using the configuration. If any instances
+ are using the configuration, returns ``FAILED_PRECONDITION``.
+
+ Only user-managed configurations can be deleted.
+
+ Authorization requires ``spanner.instanceConfigs.delete``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_delete_instance_config():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.DeleteInstanceConfigRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ client.delete_instance_config(request=request)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]):
+ The request object. The request for
+ [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig].
+ name (str):
+ Required. The name of the instance configuration to be
+ deleted. Values are of the form
+ ``projects/<project>/instanceConfigs/<instance_config>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest):
+ request = spanner_instance_admin.DeleteInstanceConfigRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_instance_config]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ def list_instance_config_operations(self,
+ request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstanceConfigOperationsPager:
+ r"""Lists the user-managed instance configuration long-running
+ operations in the given project. An instance configuration
+ operation has a name of the form
+ ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
+ The long-running operation metadata field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.start_time`` in descending order
+ starting from the most recently started operation.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_list_instance_config_operations():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instance_config_operations(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]):
+ The request object. The request for
+ [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
+ parent (str):
+ Required. The project of the instance configuration
+ operations. Values are of the form
+ ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager:
+ The response for
+ [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest):
+ request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
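+ #   (e.g. parent="projects/my-project", a hypothetical project ID,
+ #   is copied onto request.parent just below.)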
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_instance_config_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListInstanceConfigOperationsPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_instances(self,
+ request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstancesPager:
+ r"""Lists all instances in the given project.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_list_instances():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstancesRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instances(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]):
+ The request object. The request for
+ [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+ parent (str):
+ Required. The name of the project for which a list of
+ instances is requested. Values are of the form
+ ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager:
+ The response for
+ [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
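+ # (A plain dict such as {"parent": "projects/my-project"}, with a
+ # hypothetical project ID, is accepted here and coerced into a
+ # ListInstancesRequest below.)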
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.ListInstancesRequest):
+ request = spanner_instance_admin.ListInstancesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_instances]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListInstancesPager(
+ method=rpc,
+ request=request,
+ response=response,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def list_instance_partitions(self,
+ request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> pagers.ListInstancePartitionsPager:
+ r"""Lists all instance partitions for the given instance.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_list_instance_partitions():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_instance_partitions(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]):
+ The request object. The request for
+ [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
+ parent (str):
+ Required. The instance whose instance partitions should
+ be listed. Values are of the form
+ ``projects/<project>/instances/<instance>``.
Use + ``{instance} = '-'`` to list instance partitions for all + Instances in a project, e.g., + ``projects/myproject/instances/-``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager: + The response for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest): + request = spanner_instance_admin.ListInstancePartitionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instance_partitions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancePartitionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance(self, + request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.Instance: + r"""Gets information about a particular instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_get_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.GetInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ response = client.get_instance(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]):
+ The request object. The request for
+ [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance].
+ name (str):
+ Required. The name of the requested instance. Values are
+ of the form ``projects/<project>/instances/<instance>``.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.spanner_admin_instance_v1.types.Instance:
+ An isolated set of Cloud Spanner
+ resources on which databases can be
+ hosted.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.GetInstanceRequest):
+ request = spanner_instance_admin.GetInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
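+ # (The returned Instance is a proto-plus message; fields such as
+ # response.display_name read as plain Python values.)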
+ return response
+
+ def create_instance(self,
+ request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None,
+ *,
+ parent: Optional[str] = None,
+ instance_id: Optional[str] = None,
+ instance: Optional[spanner_instance_admin.Instance] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Creates an instance and begins preparing it to begin serving.
+ The returned long-running operation can be used to track the
+ progress of preparing the new instance. The instance name is
+ assigned by the caller. If the named instance already exists,
+ ``CreateInstance`` returns ``ALREADY_EXISTS``.
+
+ Immediately upon completion of this request:
+
+ - The instance is readable via the API, with all requested
+ attributes but no allocated resources. Its state is
+ ``CREATING``.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation renders the instance immediately
+ unreadable via the API.
+ - The instance can be deleted.
+ - All other attempts to modify the instance are rejected.
+
+ Upon completion of the returned operation:
+
+ - Billing for all successfully-allocated resources begins (some
+ types may have lower than the requested levels).
+ - Databases can be created in the instance.
+ - The instance's allocated resource levels are readable via the
+ API.
+ - The instance's state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track creation of the instance. The metadata field type
+ is
+ [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_create_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ instance = spanner_admin_instance_v1.Instance()
+ instance.name = "name_value"
+ instance.config = "config_value"
+ instance.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.CreateInstanceRequest(
+ parent="parent_value",
+ instance_id="instance_id_value",
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.create_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]):
+ The request object. The request for
+ [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].
+ parent (str):
+ Required. The name of the project in which to create the
+ instance. Values are of the form ``projects/<project>``.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance_id (str):
+ Required.
The ID of the instance to create. Valid
+ identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``
+ and must be between 2 and 64 characters in length.
+
+ This corresponds to the ``instance_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ instance (google.cloud.spanner_admin_instance_v1.types.Instance):
+ Required. The instance to create. The name may be
+ omitted, but if specified must be
+ ``<parent>/instances/<instance_id>``.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.spanner_admin_instance_v1.types.Instance`,
+ an isolated set of Cloud Spanner resources on which
+ databases can be hosted.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, instance_id, instance]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.CreateInstanceRequest):
+ request = spanner_instance_admin.CreateInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if instance_id is not None:
+ request.instance_id = instance_id
+ if instance is not None:
+ request.instance = instance
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("parent", request.parent),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ spanner_instance_admin.Instance,
+ metadata_type=spanner_instance_admin.CreateInstanceMetadata,
+ )
+
+ # Done; return the response.
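+ # (Callers typically block on the returned future, e.g.
+ # response.result(timeout=300), with a hypothetical timeout in
+ # seconds, as the generated snippet above does.)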
+ return response
+
+ def update_instance(self,
+ request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None,
+ *,
+ instance: Optional[spanner_instance_admin.Instance] = None,
+ field_mask: Optional[field_mask_pb2.FieldMask] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> operation.Operation:
+ r"""Updates an instance, and begins allocating or releasing
+ resources as requested. The returned long-running operation can
+ be used to track the progress of updating the instance. If the
+ named instance does not exist, returns ``NOT_FOUND``.
+
+ Immediately upon completion of this request:
+
+ - For resource types for which a decrease in the instance's
+ allocation has been requested, billing is based on the
+ newly-requested level.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance are rejected.
+ - Reading the instance via the API continues to give the
+ pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance's tables.
+ - The instance's new resource levels are readable via the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track the instance modification. The metadata field type
+ is
+ [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ Authorization requires ``spanner.instances.update`` permission
+ on the resource
+ [name][google.spanner.admin.instance.v1.Instance.name].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_update_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ instance = spanner_admin_instance_v1.Instance()
+ instance.name = "name_value"
+ instance.config = "config_value"
+ instance.display_name = "display_name_value"
+
+ request = spanner_admin_instance_v1.UpdateInstanceRequest(
+ instance=instance,
+ )
+
+ # Make the request
+ operation = client.update_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]):
+ The request object.
The request for
+ [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance].
+ instance (google.cloud.spanner_admin_instance_v1.types.Instance):
+ Required. The instance to update, which must always
+ include the instance name. Otherwise, only fields
+ mentioned in
+ [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask]
+ need be included.
+
+ This corresponds to the ``instance`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ field_mask (google.protobuf.field_mask_pb2.FieldMask):
+ Required. A mask specifying which fields in
+ [Instance][google.spanner.admin.instance.v1.Instance]
+ should be updated. The field mask must always be
+ specified; this prevents any future fields in
+ [Instance][google.spanner.admin.instance.v1.Instance]
+ from being erased accidentally by clients that do not
+ know about them.
+
+ This corresponds to the ``field_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be
+ :class:`google.cloud.spanner_admin_instance_v1.types.Instance`,
+ an isolated set of Cloud Spanner resources on which
+ databases can be hosted.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [instance, field_mask]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest):
+ request = spanner_instance_admin.UpdateInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if instance is not None:
+ request.instance = instance
+ if field_mask is not None:
+ request.field_mask = field_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("instance.name", request.instance.name),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Wrap the response in an operation future.
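+ # (operation.from_gapic wraps the raw long-running Operation so that
+ # result() deserializes to Instance and the operation metadata to
+ # UpdateInstanceMetadata.)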
+ response = operation.from_gapic(
+ response,
+ self._transport.operations_client,
+ spanner_instance_admin.Instance,
+ metadata_type=spanner_instance_admin.UpdateInstanceMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def delete_instance(self,
+ request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None,
+ *,
+ name: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> None:
+ r"""Deletes an instance.
+
+ Immediately upon completion of the request:
+
+ - Billing ceases for all of the instance's reserved resources.
+
+ Soon afterward:
+
+ - The instance and *all of its databases* immediately and
+ irrevocably disappear from the API. All data in the databases
+ is permanently deleted.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_delete_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.DeleteInstanceRequest(
+ name="name_value",
+ )
+
+ # Make the request
+ client.delete_instance(request=request)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]):
+ The request object. The request for
+ [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].
+ name (str):
+ Required. The name of the instance to be deleted. Values
+ are of the form
+ ``projects/<project>/instances/<instance>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest):
+ request = spanner_instance_admin.DeleteInstanceRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
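+ # (_wrapped_methods is populated when the transport is built; each
+ # entry already carries the method's default retry and timeout.)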
+ rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy on an instance resource. Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support
+ conditions in their IAM policies, see the [IAM
+ documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
+
+ **JSON example:**
+
+ ::
+
+     {
+       "bindings": [
+         {
+           "role": "roles/resourcemanager.organizationAdmin",
+           "members": [
+             "user:mike@example.com",
+             "group:admins@example.com",
+             "domain:google.com",
+             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+           ]
+         },
+         {
+           "role": "roles/resourcemanager.organizationViewer",
+           "members": [
+             "user:eve@example.com"
+           ],
+           "condition": {
+             "title": "expirable access",
+             "description": "Does not grant access after Sep 2020",
+             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+           }
+         }
+       ],
+       "etag": "BwWWja0YfJA=",
+       "version": 3
+     }
+
+ **YAML example:**
+
+ ::
+
+     bindings:
+     - members:
+       - user:mike@example.com
+       - group:admins@example.com
+       - domain:google.com
+       - serviceAccount:my-project-id@appspot.gserviceaccount.com
+       role: roles/resourcemanager.organizationAdmin
+     - members:
+       - user:eve@example.com
+       role: roles/resourcemanager.organizationViewer
+       condition:
+         title: expirable access
+         description: Does not grant access after Sep 2020
+         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+     etag: BwWWja0YfJA=
+     version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ if isinstance(request, dict):
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+ elif not request:
+ # Null request, just make one.
+ request = iam_policy_pb2.SetIamPolicyRequest()
+ if resource is not None:
+ request.resource = resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_iam_policy(self,
+ request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the access control policy for an instance resource. Returns
+ an empty policy if an instance exists but does not have a policy
+ set.
+
+ Authorization requires ``spanner.instances.getIamPolicy`` on
+ [resource][google.iam.v1.GetIamPolicyRequest.resource].
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+
+ **JSON example:**
+
+ ::
+
+     {
+       "bindings": [
+         {
+           "role": "roles/resourcemanager.organizationAdmin",
+           "members": [
+             "user:mike@example.com",
+             "group:admins@example.com",
+             "domain:google.com",
+             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+           ]
+         },
+         {
+           "role": "roles/resourcemanager.organizationViewer",
+           "members": [
+             "user:eve@example.com"
+           ],
+           "condition": {
+             "title": "expirable access",
+             "description": "Does not grant access after Sep 2020",
+             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
+           }
+         }
+       ],
+       "etag": "BwWWja0YfJA=",
+       "version": 3
+     }
+
+ **YAML example:**
+
+ ::
+
+     bindings:
+     - members:
+       - user:mike@example.com
+       - group:admins@example.com
+       - domain:google.com
+       - serviceAccount:my-project-id@appspot.gserviceaccount.com
+       role: roles/resourcemanager.organizationAdmin
+     - members:
+       - user:eve@example.com
+       role: roles/resourcemanager.organizationViewer
+       condition:
+         title: expirable access
+         description: Does not grant access after Sep 2020
+         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+     etag: BwWWja0YfJA=
+     version: 3
+
+ For a description of IAM and its features, see the
+ [IAM
+ documentation](https://cloud.google.com/iam/docs/).
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ if isinstance(request, dict):
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+ elif not request:
+ # Null request, just make one.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ if resource is not None:
+ request.resource = resource
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def test_iam_permissions(self,
+ request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
+ *,
+ resource: Optional[str] = None,
+ permissions: Optional[MutableSequence[str]] = None,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Returns permissions that the caller has on the specified
+ instance resource.
+
+ Attempting this RPC on a non-existent Cloud Spanner instance
+ resource will result in a NOT_FOUND error if the user has
+ ``spanner.instances.list`` permission on the containing Google
+ Cloud Project. Otherwise returns an empty set of permissions.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+ from google.iam.v1 import iam_policy_pb2 # type: ignore
+
+ def sample_test_iam_permissions():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = iam_policy_pb2.TestIamPermissionsRequest(
+ resource="resource_value",
+ permissions=['permissions_value1', 'permissions_value2'],
+ )
+
+ # Make the request
+ response = client.test_iam_permissions(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
+ The request object. Request message for ``TestIamPermissions`` method.
+ resource (str):
+ REQUIRED: The resource for which the
+ policy detail is being requested. See
+ the operation documentation for the
+ appropriate value for this field.
+
+ This corresponds to the ``resource`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ permissions (MutableSequence[str]):
+ The set of permissions to check for the ``resource``.
+ Permissions with wildcards (such as '*' or 'storage.*')
+ are not allowed. For more information see `IAM
+ Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
+
+ This corresponds to the ``permissions`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for TestIamPermissions method.
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource, permissions]
+ has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ if isinstance(request, dict):
+ # - The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+ elif not request:
+ # Null request, just make one.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+ if resource is not None:
+ request.resource = resource
+ if permissions:
+ request.permissions.extend(permissions)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("resource", request.resource),
+ )),
+ )
+
+ # Validate the universe domain.
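+ # (Editor's note, a hedged aside rather than generated code: the check
+ # below compares the client's configured universe domain, which defaults
+ # to "googleapis.com", against the universe of the credentials in use,
+ # and raises ValueError on a mismatch. A sketch of targeting another
+ # universe, with a placeholder domain:
+ #
+ #   client = InstanceAdminClient(
+ #       client_options={"universe_domain": "example-universe.example.com"})
+ # )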
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance_partition(self, + request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> spanner_instance_admin.InstancePartition: + r"""Gets information about a particular instance + partition. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_get_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstancePartitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]): + The request object. The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + name (str): + Required. The name of the requested instance partition. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.types.InstancePartition: + An isolated set of Cloud Spanner + resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest): + request = spanner_instance_admin.GetInstancePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
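+ # (Editor's note, a hedged sketch; the resource name is a placeholder:
+ # the flattened form
+ #
+ #   client.get_instance_partition(
+ #       name="projects/my-project/instances/my-instance/instancePartitions/my-partition")
+ #
+ # is shorthand for passing a fully populated GetInstancePartitionRequest
+ # via the `request` argument.)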
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instance_partition(self, + request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, + instance_partition_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an instance partition and begins preparing it to be + used. The returned long-running operation can be used to track + the progress of preparing the new instance partition. The + instance partition name is assigned by the caller. If the named + instance partition already exists, ``CreateInstancePartition`` + returns ``ALREADY_EXISTS``. + + Immediately upon completion of this request: + + - The instance partition is readable via the API, with all + requested attributes but no allocated resources. Its state is + ``CREATING``. + + Until completion of the returned operation: + + - Cancelling the operation renders the instance partition + immediately unreadable via the API. + - The instance partition can be deleted. + - All other attempts to modify the instance partition are + rejected. + + Upon completion of the returned operation: + + - Billing for all successfully-allocated resources begins (some + types may have lower than the requested levels). + - Databases can start using this instance partition. + - The instance partition's allocated resource levels are + readable via the API. + - The instance partition's state becomes ``READY``. + + The returned long-running operation will have a name of the + format ``/operations/`` + and can be used to track creation of the instance partition. The + metadata field type is + [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. + The response field type is + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], + if successful. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_create_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance_partition = spanner_admin_instance_v1.InstancePartition() + instance_partition.node_count = 1070 + instance_partition.name = "name_value" + instance_partition.config = "config_value" + instance_partition.display_name = "display_name_value" + + request = spanner_admin_instance_v1.CreateInstancePartitionRequest( + parent="parent_value", + instance_partition_id="instance_partition_id_value", + instance_partition=instance_partition, + ) + + # Make the request + operation = client.create_instance_partition(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]): + The request object. The request for + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. + parent (str): + Required. The name of the instance in which to create + the instance partition. Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + Required. The instance partition to create. The + instance_partition.name may be omitted, but if specified + must be + ``/instancePartitions/``. + + This corresponds to the ``instance_partition`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_partition_id (str): + Required. The ID of the instance partition to create. + Valid identifiers are of the form + ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 + characters in length. + + This corresponds to the ``instance_partition_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
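+ # (Editor's note, a hedged example; `req` stands in for a populated
+ # request object: mixing the two call styles, e.g.
+ #
+ #   client.create_instance_partition(request=req, parent="projects/p/instances/i")
+ #
+ # raises ValueError below, because `parent` would duplicate a field
+ # already carried by `req`.)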
+ flattened_params = [parent, instance_partition, instance_partition_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): + request = spanner_instance_admin.CreateInstancePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_partition is not None: + request.instance_partition = instance_partition + if instance_partition_id is not None: + request.instance_partition_id = instance_partition_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.InstancePartition, + metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, + ) + + # Done; return the response. + return response + + def delete_instance_partition(self, + request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an existing instance partition. Requires that the + instance partition is not used by any database or backup and is + not the default instance partition of an instance. + + Authorization requires ``spanner.instancePartitions.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_instance_partition(request=request) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]): + The request object. 
The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + name (str): + Required. The name of the instance partition to be + deleted. Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): + request = spanner_instance_admin.DeleteInstancePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_instance_partition(self, + request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, + *, + instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, + field_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an instance partition, and begins allocating or + releasing resources as requested. The returned long-running + operation can be used to track the progress of updating the + instance partition. If the named instance partition does not + exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance + partition's allocation has been requested, billing is based on + the newly-requested level. + + Until completion of the returned operation: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], + and begins restoring resources to their pre-request values. 
+ The operation is guaranteed to succeed at undoing all resource + changes, after which point it terminates with a ``CANCELLED`` + status. + - All other attempts to modify the instance partition are + rejected. + - Reading the instance partition via the API continues to give + the pre-request resource levels. + + Upon completion of the returned operation: + + - Billing begins for all successfully-allocated resources (some + types may have lower than the requested levels). + - All newly-reserved resources are available for serving the + instance partition's tables. + - The instance partition's new resource levels are readable via + the API. + + The returned long-running operation will have a name of the + format ``/operations/`` + and can be used to track the instance partition modification. + The metadata field type is + [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. + The response field type is + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], + if successful. + + Authorization requires ``spanner.instancePartitions.update`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_update_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance_partition = spanner_admin_instance_v1.InstancePartition() + instance_partition.node_count = 1070 + instance_partition.name = "name_value" + instance_partition.config = "config_value" + instance_partition.display_name = "display_name_value" + + request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( + instance_partition=instance_partition, + ) + + # Make the request + operation = client.update_instance_partition(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]): + The request object. The request for + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + Required. The instance partition to update, which must + always include the instance partition name. Otherwise, + only fields mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] + need be included. + + This corresponds to the ``instance_partition`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + should be updated. 
The field mask must always be + specified; this prevents any future fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + from being erased accidentally by clients that do not + know about them. + + This corresponds to the ``field_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define + placements on. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [instance_partition, field_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest): + request = spanner_instance_admin.UpdateInstancePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if instance_partition is not None: + request.instance_partition = instance_partition + if field_mask is not None: + request.field_mask = field_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance_partition.name", request.instance_partition.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.InstancePartition, + metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata, + ) + + # Done; return the response. + return response + + def list_instance_partition_operations(self, + request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancePartitionOperationsPager: + r"""Lists instance partition long-running operations in the given + instance. 
An instance partition operation has a name of the form + ``projects//instances//instancePartitions//operations/``. + The long-running operation metadata field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.start_time`` in descending order + starting from the most recently started operation. + + Authorization requires + ``spanner.instancePartitionOperations.list`` permission on the + resource + [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import spanner_admin_instance_v1 + + def sample_list_instance_partition_operations(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_partition_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]): + The request object. The request for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + parent (str): + Required. The parent instance of the instance partition + operations. Values are of the form + ``projects//instances/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager: + The response for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
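+ # (Editor's note, a hedged usage sketch; `client` and the parent value
+ # are placeholders: besides the item-wise loop in the docstring sample
+ # above, the returned pager also exposes page-wise iteration:
+ #
+ #   pager = client.list_instance_partition_operations(parent="projects/p/instances/i")
+ #   for page in pager.pages:
+ #       for op in page.operations:
+ #           print(op.name)
+ # )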
+ flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest): + request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instance_partition_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancePartitionOperationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def move_instance(self, + request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Moves an instance to the target instance configuration. You can + use the returned long-running operation to track the progress of + moving the instance. + + ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance + meets any of the following criteria: + + - Is undergoing a move to a different instance configuration + - Has backups + - Has an ongoing update + - Contains any CMEK-enabled databases + - Is a free trial instance + + While the operation is pending: + + - All other attempts to modify the instance, including changes + to its compute capacity, are rejected. + + - The following database and backup admin operations are + rejected: + + - ``DatabaseAdmin.CreateDatabase`` + - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if + default_leader is specified in the request.) + - ``DatabaseAdmin.RestoreDatabase`` + - ``DatabaseAdmin.CreateBackup`` + - ``DatabaseAdmin.CopyBackup`` + + - Both the source and target instance configurations are subject + to hourly compute and storage charges. + + - The instance might experience higher read-write latencies and + a higher transaction abort rate. However, moving an instance + doesn't cause any downtime. + + The returned long-running operation has a name of the format + ``/operations/`` and can be used to + track the move instance operation. The metadata field type is + [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. + The response field type is + [Instance][google.spanner.admin.instance.v1.Instance], if + successful. 
Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
+ Cancellation is not immediate because it involves moving any
+ data previously moved to the target instance configuration back
+ to the original instance configuration. You can use this
+ operation to track the progress of the cancellation. Upon
+ successful completion of the cancellation, the operation
+ terminates with ``CANCELLED`` status.
+
+ If not cancelled, upon completion of the returned operation:
+
+ - The instance successfully moves to the target instance
+ configuration.
+ - You are billed for compute and storage in target instance
+ configuration.
+
+ Authorization requires the ``spanner.instances.update``
+ permission on the resource
+ [instance][google.spanner.admin.instance.v1.Instance].
+
+ For more details, see `Move an
+ instance <https://cloud.google.com/spanner/docs/move-instance>`__.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import spanner_admin_instance_v1
+
+ def sample_move_instance():
+ # Create a client
+ client = spanner_admin_instance_v1.InstanceAdminClient()
+
+ # Initialize request argument(s)
+ request = spanner_admin_instance_v1.MoveInstanceRequest(
+ name="name_value",
+ target_config="target_config_value",
+ )
+
+ # Make the request
+ operation = client.move_instance(request=request)
+
+ print("Waiting for operation to complete...")
+
+ response = operation.result()
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]):
+ The request object. The request for
+ [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.api_core.operation.Operation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for
+ [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, spanner_instance_admin.MoveInstanceRequest):
+ request = spanner_instance_admin.MoveInstanceRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.move_instance]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Validate the universe domain.
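+ # (Editor's note, hedged: the response below is wrapped into a
+ # google.api_core.operation.Operation future. Besides `operation.result()`
+ # as in the docstring sample, a caller can request the cancellation
+ # described above:
+ #
+ #   operation = client.move_instance(request=request)
+ #   operation.cancel()
+ # )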
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + spanner_instance_admin.MoveInstanceResponse, + metadata_type=spanner_instance_admin.MoveInstanceMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "InstanceAdminClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
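+ # (Editor's note, hedged: this call returns None on success; if the
+ # server does not support DeleteOperation, the UNIMPLEMENTED status is
+ # expected to surface as google.api_core.exceptions.MethodNotImplemented.)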
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ( + "InstanceAdminClient", +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py new file mode 100644 index 0000000000..bde9847337 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py @@ -0,0 +1,723 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.longrunning import operations_pb2 # type: ignore + + +class ListInstanceConfigsPager: + """A pager for iterating through ``list_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instance_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstanceConfigs`` requests and continue to iterate + through the ``instance_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstanceConfigsResponse], + request: spanner_instance_admin.ListInstanceConfigsRequest, + response: spanner_instance_admin.ListInstanceConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
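+
+ A hedged illustration (``client`` and the parent value are
+ placeholders, not defined in this module): callers normally obtain
+ this pager from ``InstanceAdminClient.list_instance_configs`` and
+ iterate it directly::
+
+     pager = client.list_instance_configs(parent="projects/my-project")
+     for config in pager:
+         print(config.name)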
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: + for page in self.pages: + yield from page.instance_configs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstanceConfigsAsyncPager: + """A pager for iterating through ``list_instance_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instance_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstanceConfigs`` requests and continue to iterate + through the ``instance_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]], + request: spanner_instance_admin.ListInstanceConfigsRequest, + response: spanner_instance_admin.ListInstanceConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.instance_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstanceConfigOperationsPager: + """A pager for iterating through ``list_instance_config_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstanceConfigOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstanceConfigOperationsResponse], + request: spanner_instance_admin.ListInstanceConfigOperationsRequest, + response: spanner_instance_admin.ListInstanceConfigOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstanceConfigOperationsAsyncPager: + """A pager for iterating through ``list_instance_config_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstanceConfigOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]], + request: spanner_instance_admin.ListInstanceConfigOperationsRequest, + response: spanner_instance_admin.ListInstanceConfigOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstancesResponse], + request: spanner_instance_admin.ListInstancesRequest, + response: spanner_instance_admin.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstancesResponse]], + request: spanner_instance_admin.ListInstancesRequest, + response: spanner_instance_admin.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionsPager: + """A pager for iterating through ``list_instance_partitions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instance_partitions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstancePartitions`` requests and continue to iterate + through the ``instance_partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstancePartitionsResponse], + request: spanner_instance_admin.ListInstancePartitionsRequest, + response: spanner_instance_admin.ListInstancePartitionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[spanner_instance_admin.InstancePartition]: + for page in self.pages: + yield from page.instance_partitions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionsAsyncPager: + """A pager for iterating through ``list_instance_partitions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instance_partitions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstancePartitions`` requests and continue to iterate + through the ``instance_partitions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]], + request: spanner_instance_admin.ListInstancePartitionsRequest, + response: spanner_instance_admin.ListInstancePartitionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstancePartition]: + async def async_generator(): + async for page in self.pages: + for response in page.instance_partitions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionOperationsPager: + """A pager for iterating through ``list_instance_partition_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstancePartitionOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., spanner_instance_admin.ListInstancePartitionOperationsResponse], + request: spanner_instance_admin.ListInstancePartitionOperationsRequest, + response: spanner_instance_admin.ListInstancePartitionOperationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancePartitionOperationsAsyncPager: + """A pager for iterating through ``list_instance_partition_operations`` requests. + + This class thinly wraps an initial + :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstancePartitionOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]], + request: spanner_instance_admin.ListInstancePartitionOperationsRequest, + response: spanner_instance_admin.ListInstancePartitionOperationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): + The initial request object. + response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for response in page.operations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst new file mode 100644 index 0000000000..762ac0c765 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`InstanceAdminTransport` is the ABC for all transports. +- public child `InstanceAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `InstanceAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseInstanceAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `InstanceAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py new file mode 100644 index 0000000000..1892e6a551 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
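All the pagers above follow the same contract: iterating the pager re-sends the saved request with each response's ``next_page_token`` until the token is empty, reusing the saved ``retry``/``timeout``/``metadata``. A minimal consumption sketch, illustrative only (``projects/my-project`` is a placeholder and the client assumes application default credentials):

```python
# Illustrative only: consuming the sync pagers defined above.
from google.cloud import spanner_admin_instance_v1

client = spanner_admin_instance_v1.InstanceAdminClient()

# Item-level iteration; follow-up ListInstances pages are fetched lazily.
for instance in client.list_instances(parent="projects/my-project"):
    print(instance.name)

# Page-level iteration, e.g. when per-page tokens or sizes matter.
for page in client.list_instances(parent="projects/my-project").pages:
    print(len(page.instances), page.next_page_token)
```

The async pagers behave the same way, except that ``pages`` and ``__aiter__`` must be consumed with ``async for`` from the async client.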
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceAdminTransport +from .grpc import InstanceAdminGrpcTransport +from .grpc_asyncio import InstanceAdminGrpcAsyncIOTransport +from .rest import InstanceAdminRestTransport +from .rest import InstanceAdminRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] +_transport_registry['grpc'] = InstanceAdminGrpcTransport +_transport_registry['grpc_asyncio'] = InstanceAdminGrpcAsyncIOTransport +_transport_registry['rest'] = InstanceAdminRestTransport + +__all__ = ( + 'InstanceAdminTransport', + 'InstanceAdminGrpcTransport', + 'InstanceAdminGrpcAsyncIOTransport', + 'InstanceAdminRestTransport', + 'InstanceAdminRestInterceptor', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py new file mode 100644 index 0000000000..cac35e8288 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py @@ -0,0 +1,567 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
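The ``_transport_registry`` above is what resolves a transport name to a class, and the same keys are what the clients accept through their ``transport=`` argument. A quick sketch, illustrative only (the registry is a private module attribute, so this is for understanding rather than use):

```python
# Illustrative only: resolving transport classes by their registry keys.
from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports

grpc_cls = transports._transport_registry["grpc"]  # InstanceAdminGrpcTransport
rest_cls = transports._transport_registry["rest"]  # InstanceAdminRestTransport
print(grpc_cls.__name__, rest_cls.__name__)
```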
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InstanceAdminTransport(abc.ABC): + """Abstract transport class for InstanceAdmin.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', + ) + + DEFAULT_HOST: str = 'spanner.googleapis.com' + + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
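The credential logic that follows enforces one resolution order: explicit ``credentials`` and the deprecated ``credentials_file`` are mutually exclusive, a file path (when given) is loaded via ``google.auth.load_credentials_from_file``, and otherwise application default credentials are used. A standalone sketch of that order (``resolve_credentials`` is a made-up helper for illustration, not part of this library):

```python
# Illustrative only: the resolution order implemented just below.
import google.auth
from google.api_core import exceptions as core_exceptions

def resolve_credentials(credentials=None, credentials_file=None, scopes=None):
    if credentials and credentials_file:
        raise core_exceptions.DuplicateCredentialArgs(
            "'credentials_file' and 'credentials' are mutually exclusive")
    if credentials_file is not None:
        # Deprecated path; slated for removal in the next major version.
        credentials, _ = google.auth.load_credentials_from_file(
            credentials_file, scopes=scopes)
    elif credentials is None:
        # Fall back to application default credentials (ADC).
        credentials, _ = google.auth.default(scopes=scopes)
    return credentials
```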
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_instance_configs: gapic_v1.method.wrap_method( + self.list_instance_configs, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_instance_config: gapic_v1.method.wrap_method( + self.get_instance_config, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_instance_config: gapic_v1.method.wrap_method( + self.create_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_config: gapic_v1.method.wrap_method( + self.update_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_config: gapic_v1.method.wrap_method( + self.delete_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_config_operations: gapic_v1.method.wrap_method( + self.list_instance_config_operations, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_instance_partitions: gapic_v1.method.wrap_method( + self.list_instance_partitions, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), 
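The ``retries.Retry`` settings used in the entries above (``initial=1.0``, ``multiplier=1.3``, ``maximum=32.0``, overall deadline 3600 s) retry only ``DeadlineExceeded`` and ``ServiceUnavailable``, with delays that grow geometrically and cap at 32 seconds; api_core additionally randomizes each sleep, so the sequence below is nominal, not exact:

```python
# Illustrative only: nominal (pre-jitter) backoff delays for
# initial=1.0, multiplier=1.3, maximum=32.0.
delay, nominal = 1.0, []
for _ in range(10):
    nominal.append(round(delay, 2))
    delay = min(delay * 1.3, 32.0)
print(nominal)  # [1.0, 1.3, 1.69, 2.2, 2.86, 3.71, 4.83, 6.27, 8.16, 10.6]
```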
+ self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.get_instance_partition: gapic_v1.method.wrap_method( + self.get_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.create_instance_partition: gapic_v1.method.wrap_method( + self.create_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_partition: gapic_v1.method.wrap_method( + self.delete_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_partition: gapic_v1.method.wrap_method( + self.update_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_partition_operations: gapic_v1.method.wrap_method( + self.list_instance_partition_operations, + default_timeout=None, + client_info=client_info, + ), + self.move_instance: gapic_v1.method.wrap_method( + self.move_instance, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + Union[ + spanner_instance_admin.ListInstanceConfigsResponse, + Awaitable[spanner_instance_admin.ListInstanceConfigsResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + Union[ + spanner_instance_admin.InstanceConfig, + Awaitable[spanner_instance_admin.InstanceConfig] + ]]: + raise NotImplementedError() + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance_config(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceConfigRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance_config(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceConfigRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_instance_config_operations(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigOperationsRequest], + Union[ + spanner_instance_admin.ListInstanceConfigOperationsResponse, + Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + Union[ + spanner_instance_admin.ListInstancesResponse, + Awaitable[spanner_instance_admin.ListInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + Union[ + spanner_instance_admin.ListInstancePartitionsResponse, + Awaitable[spanner_instance_admin.ListInstancePartitionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + Union[ + spanner_instance_admin.Instance, + Awaitable[spanner_instance_admin.Instance] + ]]: + raise NotImplementedError() + + @property + def create_instance(self) -> Callable[ + [spanner_instance_admin.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + 
[iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance_partition(self) -> Callable[ + [spanner_instance_admin.GetInstancePartitionRequest], + Union[ + spanner_instance_admin.InstancePartition, + Awaitable[spanner_instance_admin.InstancePartition] + ]]: + raise NotImplementedError() + + @property + def create_instance_partition(self) -> Callable[ + [spanner_instance_admin.CreateInstancePartitionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance_partition(self) -> Callable[ + [spanner_instance_admin.DeleteInstancePartitionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_instance_partition(self) -> Callable[ + [spanner_instance_admin.UpdateInstancePartitionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_instance_partition_operations(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionOperationsRequest], + Union[ + spanner_instance_admin.ListInstancePartitionOperationsResponse, + Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse] + ]]: + raise NotImplementedError() + + @property + def move_instance(self) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'InstanceAdminTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py new file mode 100644 index 0000000000..0319e519cc --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py @@ -0,0 +1,1359 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
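The gRPC transport module that begins here wires a ``_LoggingClientInterceptor`` into every channel. The interceptor is a no-op unless DEBUG logging is enabled for the module's logger (and api_core's optional ``client_logging`` support is importable). A sketch of opting in, assuming nothing beyond standard-library logging setup:

```python
# Illustrative only: enable the per-RPC debug records emitted by the
# interceptor defined below.
import logging

logging.basicConfig(level=logging.INFO)  # some handler must exist
logging.getLogger(
    "google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.grpc"
).setLevel(logging.DEBUG)
```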
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import operations_v1
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the trailing gRPC metadata into a dict of printable key/value pairs.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class InstanceAdminGrpcTransport(InstanceAdminTransport):
+    """gRPC backend transport for InstanceAdmin.
+
+    Cloud Spanner Instance Admin API
+
+    The Cloud Spanner Instance Admin API can be used to create,
+    delete, modify and list instances. Instances are dedicated Cloud
+    Spanner serving and storage resources to be used by Cloud
+    Spanner databases.
+
+    Each instance has a "configuration", which dictates where the
+    serving resources for the Cloud Spanner instance are located
+    (e.g., US-central, Europe). Configurations are created by Google
+    based on resource availability.
+
+    Cloud Spanner billing is based on the instances that exist and
+    their sizes. After an instance exists, there are no additional
+    per-database or per-operation charges for use of the instance
+    (though there may be additional network bandwidth charges).
+    Instances offer isolation: problems with databases in one
+    instance will not affect other instances. However, within an
+    instance databases can affect each other. For example, if one
+    database in an instance receives a lot of requests and consumes
+    most of the instance resources, fewer resources are available
+    for other databases in that instance, and their performance may
+    suffer.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'spanner.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'spanner.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
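The branch just below converts the certificate callback into channel-level SSL credentials. The equivalent standalone call, where the callback returns PEM-encoded ``(cert, key)`` bytes, looks like this (illustrative only; ``mtls_channel_credentials`` is a made-up name):

```python
# Illustrative only: building mutual-TLS channel credentials from a
# client certificate callback, as the code below does.
import grpc

def mtls_channel_credentials(client_cert_source):
    cert, key = client_cert_source()  # both PEM-encoded bytes
    return grpc.ssl_channel_credentials(
        certificate_chain=cert, private_key=key)
```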
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
+
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'spanner.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + spanner_instance_admin.ListInstanceConfigsResponse]: + r"""Return a callable for the list instance configs method over gRPC. + + Lists the supported instance configurations for a + given project. + Returns both Google-managed configurations and + user-managed configurations. + + Returns: + Callable[[~.ListInstanceConfigsRequest], + ~.ListInstanceConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_configs' not in self._stubs: + self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', + request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, + ) + return self._stubs['list_instance_configs'] + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + spanner_instance_admin.InstanceConfig]: + r"""Return a callable for the get instance config method over gRPC. + + Gets information about a particular instance + configuration. + + Returns: + Callable[[~.GetInstanceConfigRequest], + ~.InstanceConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance_config' not in self._stubs: + self._stubs['get_instance_config'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', + request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, + response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, + ) + return self._stubs['get_instance_config'] + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + operations_pb2.Operation]: + r"""Return a callable for the create instance config method over gRPC. + + Creates an instance configuration and begins preparing it to be + used. The returned long-running operation can be used to track + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. + + Immediately after the request returns: + + - The instance configuration is readable via the API, with all + requested attributes. 
The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field is set to true. Its state is ``CREATING``.
+
+        While the operation is pending:
+
+        -  Cancelling the operation renders the instance configuration
+           immediately unreadable via the API.
+        -  Except for deleting the creating resource, all other attempts
+           to modify the instance configuration are rejected.
+
+        Upon completion of the returned operation:
+
+        -  Instances can be created using the instance configuration.
+        -  The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field becomes false. Its state becomes ``READY``.
+
+        The returned long-running operation will have a name of the
+        format ``<instance_config_name>/operations/<operation_id>`` and
+        can be used to track creation of the instance configuration. The
+        metadata field type is
+        [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
+        The response field type is
+        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+        if successful.
+
+        Authorization requires ``spanner.instanceConfigs.create``
+        permission on the resource
+        [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
+
+        Returns:
+            Callable[[~.CreateInstanceConfigRequest],
+                    ~.Operation]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_instance_config' not in self._stubs:
+            self._stubs['create_instance_config'] = self._logged_channel.unary_unary(
+                    '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig',
+                    request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize,
+                    response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs['create_instance_config']
+
+    @property
+    def update_instance_config(self) -> Callable[
+            [spanner_instance_admin.UpdateInstanceConfigRequest],
+            operations_pb2.Operation]:
+        r"""Return a callable for the update instance config method over gRPC.
+
+        Updates an instance configuration. The returned long-running
+        operation can be used to track the progress of updating the
+        instance. If the named instance configuration does not exist,
+        returns ``NOT_FOUND``.
+
+        Only user-managed configurations can be updated.
+
+        Immediately after the request returns:
+
+        -  The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field is set to true.
+
+        While the operation is pending:
+
+        -  Cancelling the operation sets its metadata's
+           [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
+           The operation is guaranteed to succeed at undoing all changes,
+           after which point it terminates with a ``CANCELLED`` status.
+        -  All other attempts to modify the instance configuration are
+           rejected.
+        -  Reading the instance configuration via the API continues to
+           give the pre-request values.
+
+        Upon completion of the returned operation:
+
+        -  Creating instances using the instance configuration uses the
+           new values.
+        -  The new values of the instance configuration are readable via
+           the API.
+        -  The instance configuration's
+           [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+           field becomes false.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_config_name>/operations/<operation_id>`` and
+ can be used to track the instance configuration modification.
+ The metadata field type is
+ [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
+ The response field type is
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+ if successful.
+
+ Authorization requires ``spanner.instanceConfigs.update``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+ Returns:
+ Callable[[~.UpdateInstanceConfigRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_instance_config' not in self._stubs:
+ self._stubs['update_instance_config'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig',
+ request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['update_instance_config']
+
+ @property
+ def delete_instance_config(self) -> Callable[
+ [spanner_instance_admin.DeleteInstanceConfigRequest],
+ empty_pb2.Empty]:
+ r"""Return a callable for the delete instance config method over gRPC.
+
+ Deletes the instance configuration. Deletion is only allowed
+ when no instances are using the configuration. If any instances
+ are using the configuration, returns ``FAILED_PRECONDITION``.
+
+ Only user-managed configurations can be deleted.
+
+ Authorization requires ``spanner.instanceConfigs.delete``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+ Returns:
+ Callable[[~.DeleteInstanceConfigRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'delete_instance_config' not in self._stubs:
+ self._stubs['delete_instance_config'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig',
+ request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs['delete_instance_config']
+
+ @property
+ def list_instance_config_operations(self) -> Callable[
+ [spanner_instance_admin.ListInstanceConfigOperationsRequest],
+ spanner_instance_admin.ListInstanceConfigOperationsResponse]:
+ r"""Return a callable for the list instance config
+ operations method over gRPC.
+
+ Lists the user-managed instance configuration long-running
+ operations in the given project. An instance configuration
+ operation has a name of the form
+ ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
+ The long-running operation metadata field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.start_time`` in descending order
+ starting from the most recently started operation.
+ + Returns: + Callable[[~.ListInstanceConfigOperationsRequest], + ~.ListInstanceConfigOperationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_config_operations' not in self._stubs: + self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', + request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, + ) + return self._stubs['list_instance_config_operations'] + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + spanner_instance_admin.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in the given project. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', + request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + spanner_instance_admin.ListInstancePartitionsResponse]: + r"""Return a callable for the list instance partitions method over gRPC. + + Lists all instance partitions for the given instance. + + Returns: + Callable[[~.ListInstancePartitionsRequest], + ~.ListInstancePartitionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_partitions' not in self._stubs: + self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', + request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, + ) + return self._stubs['list_instance_partitions'] + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + spanner_instance_admin.Instance]: + r"""Return a callable for the get instance method over gRPC. + + Gets information about a particular instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
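+ # Note: the stub is created lazily on first attribute access and
+ # memoized in self._stubs, so repeated reads reuse a single callable.
+ # A sketch of direct transport usage (resource name hypothetical):
+ #
+ #   request = spanner_instance_admin.GetInstanceRequest(
+ #       name="projects/my-project/instances/my-instance")
+ #   instance = transport.get_instance(request)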
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'get_instance' not in self._stubs:
+ self._stubs['get_instance'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance',
+ request_serializer=spanner_instance_admin.GetInstanceRequest.serialize,
+ response_deserializer=spanner_instance_admin.Instance.deserialize,
+ )
+ return self._stubs['get_instance']
+
+ @property
+ def create_instance(self) -> Callable[
+ [spanner_instance_admin.CreateInstanceRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the create instance method over gRPC.
+
+ Creates an instance and begins preparing it to begin serving.
+ The returned long-running operation can be used to track the
+ progress of preparing the new instance. The instance name is
+ assigned by the caller. If the named instance already exists,
+ ``CreateInstance`` returns ``ALREADY_EXISTS``.
+
+ Immediately upon completion of this request:
+
+ - The instance is readable via the API, with all requested
+ attributes but no allocated resources. Its state is
+ ``CREATING``.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation renders the instance immediately
+ unreadable via the API.
+ - The instance can be deleted.
+ - All other attempts to modify the instance are rejected.
+
+ Upon completion of the returned operation:
+
+ - Billing for all successfully-allocated resources begins (some
+ types may have lower than the requested levels).
+ - Databases can be created in the instance.
+ - The instance's allocated resource levels are readable via the
+ API.
+ - The instance's state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track creation of the instance. The metadata field type
+ is
+ [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ Returns:
+ Callable[[~.CreateInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_instance' not in self._stubs:
+ self._stubs['create_instance'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance',
+ request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['create_instance']
+
+ @property
+ def update_instance(self) -> Callable[
+ [spanner_instance_admin.UpdateInstanceRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the update instance method over gRPC.
+
+ Updates an instance, and begins allocating or releasing
+ resources as requested. The returned long-running operation can
+ be used to track the progress of updating the instance. If the
+ named instance does not exist, returns ``NOT_FOUND``.
+
+ Immediately upon completion of this request:
+
+ - For resource types for which a decrease in the instance's
+ allocation has been requested, billing is based on the
+ newly-requested level.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance are rejected.
+ - Reading the instance via the API continues to give the
+ pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance's tables.
+ - The instance's new resource levels are readable via the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_name>/operations/<operation_id>`` and can be
+ used to track the instance modification. The metadata field type
+ is
+ [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful.
+
+ Authorization requires ``spanner.instances.update`` permission
+ on the resource
+ [name][google.spanner.admin.instance.v1.Instance.name].
+
+ Returns:
+ Callable[[~.UpdateInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_instance' not in self._stubs:
+ self._stubs['update_instance'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance',
+ request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['update_instance']
+
+ @property
+ def delete_instance(self) -> Callable[
+ [spanner_instance_admin.DeleteInstanceRequest],
+ empty_pb2.Empty]:
+ r"""Return a callable for the delete instance method over gRPC.
+
+ Deletes an instance.
+
+ Immediately upon completion of the request:
+
+ - Billing ceases for all of the instance's reserved resources.
+
+ Soon afterward:
+
+ - The instance and *all of its databases* immediately and
+ irrevocably disappear from the API. All data in the databases
+ is permanently deleted.
+
+ Returns:
+ Callable[[~.DeleteInstanceRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'delete_instance' not in self._stubs:
+ self._stubs['delete_instance'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance',
+ request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs['delete_instance']
+
+ @property
+ def set_iam_policy(self) -> Callable[
+ [iam_policy_pb2.SetIamPolicyRequest],
+ policy_pb2.Policy]:
+ r"""Return a callable for the set iam policy method over gRPC.
+
+ Sets the access control policy on an instance resource. Replaces
+ any existing policy.
+ + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + instance resource. + + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def get_instance_partition(self) -> Callable[ + [spanner_instance_admin.GetInstancePartitionRequest], + spanner_instance_admin.InstancePartition]: + r"""Return a callable for the get instance partition method over gRPC. 
+
+ Gets information about a particular instance
+ partition.
+
+ Returns:
+ Callable[[~.GetInstancePartitionRequest],
+ ~.InstancePartition]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'get_instance_partition' not in self._stubs:
+ self._stubs['get_instance_partition'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition',
+ request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize,
+ response_deserializer=spanner_instance_admin.InstancePartition.deserialize,
+ )
+ return self._stubs['get_instance_partition']
+
+ @property
+ def create_instance_partition(self) -> Callable[
+ [spanner_instance_admin.CreateInstancePartitionRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the create instance partition method over gRPC.
+
+ Creates an instance partition and begins preparing it to be
+ used. The returned long-running operation can be used to track
+ the progress of preparing the new instance partition. The
+ instance partition name is assigned by the caller. If the named
+ instance partition already exists, ``CreateInstancePartition``
+ returns ``ALREADY_EXISTS``.
+
+ Immediately upon completion of this request:
+
+ - The instance partition is readable via the API, with all
+ requested attributes but no allocated resources. Its state is
+ ``CREATING``.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation renders the instance partition
+ immediately unreadable via the API.
+ - The instance partition can be deleted.
+ - All other attempts to modify the instance partition are
+ rejected.
+
+ Upon completion of the returned operation:
+
+ - Billing for all successfully-allocated resources begins (some
+ types may have lower than the requested levels).
+ - Databases can start using this instance partition.
+ - The instance partition's allocated resource levels are
+ readable via the API.
+ - The instance partition's state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_partition_name>/operations/<operation_id>``
+ and can be used to track creation of the instance partition. The
+ metadata field type is
+ [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata].
+ The response field type is
+ [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+ if successful.
+
+ Returns:
+ Callable[[~.CreateInstancePartitionRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
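+ # Note: the deserializer yields a raw longrunning Operation rather
+ # than the final InstancePartition; a caller would typically poll it
+ # through this transport's operations_client. A sketch (names and
+ # request assumed):
+ #
+ #   operation = transport.create_instance_partition(request)
+ #   operation = transport.operations_client.get_operation(operation.name)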
+ if 'create_instance_partition' not in self._stubs:
+ self._stubs['create_instance_partition'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition',
+ request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['create_instance_partition']
+
+ @property
+ def delete_instance_partition(self) -> Callable[
+ [spanner_instance_admin.DeleteInstancePartitionRequest],
+ empty_pb2.Empty]:
+ r"""Return a callable for the delete instance partition method over gRPC.
+
+ Deletes an existing instance partition. Requires that the
+ instance partition is not used by any database or backup and is
+ not the default instance partition of an instance.
+
+ Authorization requires ``spanner.instancePartitions.delete``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+ Returns:
+ Callable[[~.DeleteInstancePartitionRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'delete_instance_partition' not in self._stubs:
+ self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition',
+ request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs['delete_instance_partition']
+
+ @property
+ def update_instance_partition(self) -> Callable[
+ [spanner_instance_admin.UpdateInstancePartitionRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the update instance partition method over gRPC.
+
+ Updates an instance partition, and begins allocating or
+ releasing resources as requested. The returned long-running
+ operation can be used to track the progress of updating the
+ instance partition. If the named instance partition does not
+ exist, returns ``NOT_FOUND``.
+
+ Immediately upon completion of this request:
+
+ - For resource types for which a decrease in the instance
+ partition's allocation has been requested, billing is based on
+ the newly-requested level.
+
+ Until completion of the returned operation:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time],
+ and begins restoring resources to their pre-request values.
+ The operation is guaranteed to succeed at undoing all resource
+ changes, after which point it terminates with a ``CANCELLED``
+ status.
+ - All other attempts to modify the instance partition are
+ rejected.
+ - Reading the instance partition via the API continues to give
+ the pre-request resource levels.
+
+ Upon completion of the returned operation:
+
+ - Billing begins for all successfully-allocated resources (some
+ types may have lower than the requested levels).
+ - All newly-reserved resources are available for serving the
+ instance partition's tables.
+ - The instance partition's new resource levels are readable via
+ the API.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_partition_name>/operations/<operation_id>``
+ and can be used to track the instance partition modification.
+ The metadata field type is
+ [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata].
+ The response field type is
+ [InstancePartition][google.spanner.admin.instance.v1.InstancePartition],
+ if successful.
+
+ Authorization requires ``spanner.instancePartitions.update``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstancePartition.name].
+
+ Returns:
+ Callable[[~.UpdateInstancePartitionRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_instance_partition' not in self._stubs:
+ self._stubs['update_instance_partition'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition',
+ request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['update_instance_partition']
+
+ @property
+ def list_instance_partition_operations(self) -> Callable[
+ [spanner_instance_admin.ListInstancePartitionOperationsRequest],
+ spanner_instance_admin.ListInstancePartitionOperationsResponse]:
+ r"""Return a callable for the list instance partition
+ operations method over gRPC.
+
+ Lists instance partition long-running operations in the given
+ instance. An instance partition operation has a name of the form
+ ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``.
+ The long-running operation metadata field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.start_time`` in descending order
+ starting from the most recently started operation.
+
+ Authorization requires
+ ``spanner.instancePartitionOperations.list`` permission on the
+ resource
+ [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent].
+
+ Returns:
+ Callable[[~.ListInstancePartitionOperationsRequest],
+ ~.ListInstancePartitionOperationsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_instance_partition_operations' not in self._stubs:
+ self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations',
+ request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize,
+ response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize,
+ )
+ return self._stubs['list_instance_partition_operations']
+
+ @property
+ def move_instance(self) -> Callable[
+ [spanner_instance_admin.MoveInstanceRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the move instance method over gRPC.
+
+ Moves an instance to the target instance configuration. You can
+ use the returned long-running operation to track the progress of
+ moving the instance.
+
+ ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance
+ meets any of the following criteria:
+
+ - Is undergoing a move to a different instance configuration
+ - Has backups
+ - Has an ongoing update
+ - Contains any CMEK-enabled databases
+ - Is a free trial instance
+
+ While the operation is pending:
+
+ - All other attempts to modify the instance, including changes
+ to its compute capacity, are rejected.
+
+ - The following database and backup admin operations are
+ rejected:
+
+ - ``DatabaseAdmin.CreateDatabase``
+ - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if
+ default_leader is specified in the request.)
+ - ``DatabaseAdmin.RestoreDatabase``
+ - ``DatabaseAdmin.CreateBackup``
+ - ``DatabaseAdmin.CopyBackup``
+
+ - Both the source and target instance configurations are subject
+ to hourly compute and storage charges.
+
+ - The instance might experience higher read-write latencies and
+ a higher transaction abort rate. However, moving an instance
+ doesn't cause any downtime.
+
+ The returned long-running operation has a name of the format
+ ``<instance_name>/operations/<operation_id>`` and can be used to
+ track the move instance operation. The metadata field type is
+ [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata].
+ The response field type is
+ [Instance][google.spanner.admin.instance.v1.Instance], if
+ successful. Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
+ Cancellation is not immediate because it involves moving any
+ data previously moved to the target instance configuration back
+ to the original instance configuration. You can use this
+ operation to track the progress of the cancellation. Upon
+ successful completion of the cancellation, the operation
+ terminates with ``CANCELLED`` status.
+
+ If not cancelled, upon completion of the returned operation:
+
+ - The instance successfully moves to the target instance
+ configuration.
+ - You are billed for compute and storage in target instance
+ configuration.
+
+ Authorization requires the ``spanner.instances.update``
+ permission on the resource
+ [instance][google.spanner.admin.instance.v1.Instance].
+
+ For more details, see `Move an
+ instance <https://cloud.google.com/spanner/docs/move-instance>`__.
+
+ Returns:
+ Callable[[~.MoveInstanceRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'move_instance' not in self._stubs:
+ self._stubs['move_instance'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance',
+ request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['move_instance']
+
+ def close(self):
+ self._logged_channel.close()
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+ r"""Return a callable for the delete_operation method over gRPC.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
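+ # Note: the Operations-mixin stubs below differ from the generated
+ # ones above: they use the raw protobuf SerializeToString/FromString
+ # hooks because operations_pb2 messages are plain protobufs (there is
+ # no proto-plus .serialize/.deserialize), and DeleteOperation has an
+ # Empty response, so no response deserializer is registered.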
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'InstanceAdminGrpcTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py new file mode 100644 index 0000000000..01364ea833 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py @@ -0,0 +1,1560 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc # type: ignore
+import proto # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
+from .grpc import InstanceAdminGrpcTransport
+
+try:
+ from google.api_core import client_logging # type: ignore
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ async def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra = {
+ "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+ # Convert gRPC metadata `<grpc.aio.Metadata>` to list of tuples
+ metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload =
f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class InstanceAdminGrpcAsyncIOTransport(InstanceAdminTransport): + """gRPC AsyncIO backend transport for InstanceAdmin. + + Cloud Spanner Instance Admin API + + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
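+
+ Example (a sketch; assumes Application Default Credentials are
+ available in the environment)::
+
+ channel = InstanceAdminGrpcAsyncIOTransport.create_channel()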
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]]: + r"""Return a callable for the list instance configs method over gRPC. + + Lists the supported instance configurations for a + given project. + Returns both Google-managed configurations and + user-managed configurations. + + Returns: + Callable[[~.ListInstanceConfigsRequest], + Awaitable[~.ListInstanceConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_configs' not in self._stubs: + self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', + request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, + ) + return self._stubs['list_instance_configs'] + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + Awaitable[spanner_instance_admin.InstanceConfig]]: + r"""Return a callable for the get instance config method over gRPC. + + Gets information about a particular instance + configuration. + + Returns: + Callable[[~.GetInstanceConfigRequest], + Awaitable[~.InstanceConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance_config' not in self._stubs: + self._stubs['get_instance_config'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', + request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, + response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, + ) + return self._stubs['get_instance_config'] + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance config method over gRPC. + + Creates an instance configuration and begins preparing it to be + used. The returned long-running operation can be used to track + the progress of preparing the new instance configuration. The + instance configuration name is assigned by the caller. If the + named instance configuration already exists, + ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. + + Immediately after the request returns: + + - The instance configuration is readable via the API, with all + requested attributes. The instance configuration's + [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + field is set to true. Its state is ``CREATING``. 
+
+ While the operation is pending:
+
+ - Cancelling the operation renders the instance configuration
+ immediately unreadable via the API.
+ - Except for deleting the creating resource, all other attempts
+ to modify the instance configuration are rejected.
+
+ Upon completion of the returned operation:
+
+ - Instances can be created using the instance configuration.
+ - The instance configuration's
+ [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+ field becomes false. Its state becomes ``READY``.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_config_name>/operations/<operation_id>`` and
+ can be used to track creation of the instance configuration. The
+ metadata field type is
+ [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
+ The response field type is
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+ if successful.
+
+ Authorization requires ``spanner.instanceConfigs.create``
+ permission on the resource
+ [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
+
+ Returns:
+ Callable[[~.CreateInstanceConfigRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_instance_config' not in self._stubs:
+ self._stubs['create_instance_config'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig',
+ request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['create_instance_config']
+
+ @property
+ def update_instance_config(self) -> Callable[
+ [spanner_instance_admin.UpdateInstanceConfigRequest],
+ Awaitable[operations_pb2.Operation]]:
+ r"""Return a callable for the update instance config method over gRPC.
+
+ Updates an instance configuration. The returned long-running
+ operation can be used to track the progress of updating the
+ instance. If the named instance configuration does not exist,
+ returns ``NOT_FOUND``.
+
+ Only user-managed configurations can be updated.
+
+ Immediately after the request returns:
+
+ - The instance configuration's
+ [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+ field is set to true.
+
+ While the operation is pending:
+
+ - Cancelling the operation sets its metadata's
+ [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
+ The operation is guaranteed to succeed at undoing all changes,
+ after which point it terminates with a ``CANCELLED`` status.
+ - All other attempts to modify the instance configuration are
+ rejected.
+ - Reading the instance configuration via the API continues to
+ give the pre-request values.
+
+ Upon completion of the returned operation:
+
+ - Creating instances using the instance configuration uses the
+ new values.
+ - The new values of the instance configuration are readable via
+ the API.
+ - The instance configuration's
+ [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
+ field becomes false.
+
+ The returned long-running operation will have a name of the
+ format ``<instance_config_name>/operations/<operation_id>`` and
+ can be used to track the instance configuration modification.
+ The metadata field type is
+ [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
+ The response field type is
+ [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
+ if successful.
+
+ Authorization requires ``spanner.instanceConfigs.update``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+ Returns:
+ Callable[[~.UpdateInstanceConfigRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'update_instance_config' not in self._stubs:
+ self._stubs['update_instance_config'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig',
+ request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['update_instance_config']
+
+ @property
+ def delete_instance_config(self) -> Callable[
+ [spanner_instance_admin.DeleteInstanceConfigRequest],
+ Awaitable[empty_pb2.Empty]]:
+ r"""Return a callable for the delete instance config method over gRPC.
+
+ Deletes the instance configuration. Deletion is only allowed
+ when no instances are using the configuration. If any instances
+ are using the configuration, returns ``FAILED_PRECONDITION``.
+
+ Only user-managed configurations can be deleted.
+
+ Authorization requires ``spanner.instanceConfigs.delete``
+ permission on the resource
+ [name][google.spanner.admin.instance.v1.InstanceConfig.name].
+
+ Returns:
+ Callable[[~.DeleteInstanceConfigRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'delete_instance_config' not in self._stubs:
+ self._stubs['delete_instance_config'] = self._logged_channel.unary_unary(
+ '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig',
+ request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs['delete_instance_config']
+
+ @property
+ def list_instance_config_operations(self) -> Callable[
+ [spanner_instance_admin.ListInstanceConfigOperationsRequest],
+ Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]]:
+ r"""Return a callable for the list instance config
+ operations method over gRPC.
+
+ Lists the user-managed instance configuration long-running
+ operations in the given project. An instance configuration
+ operation has a name of the form
+ ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
+ The long-running operation metadata field type
+ ``metadata.type_url`` describes the type of the metadata.
+ Operations returned include those that have
+ completed/failed/canceled within the last 7 days, and pending
+ operations. Operations returned are ordered by
+ ``operation.metadata.value.start_time`` in descending order
+ starting from the most recently started operation.
+ + Returns: + Callable[[~.ListInstanceConfigOperationsRequest], + Awaitable[~.ListInstanceConfigOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_config_operations' not in self._stubs: + self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', + request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, + ) + return self._stubs['list_instance_config_operations'] + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + Awaitable[spanner_instance_admin.ListInstancesResponse]]: + r"""Return a callable for the list instances method over gRPC. + + Lists all instances in the given project. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', + request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]]: + r"""Return a callable for the list instance partitions method over gRPC. + + Lists all instance partitions for the given instance. + + Returns: + Callable[[~.ListInstancePartitionsRequest], + Awaitable[~.ListInstancePartitionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_partitions' not in self._stubs: + self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', + request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, + ) + return self._stubs['list_instance_partitions'] + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + Awaitable[spanner_instance_admin.Instance]]: + r"""Return a callable for the get instance method over gRPC. + + Gets information about a particular instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance' not in self._stubs: + self._stubs['get_instance'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance', + request_serializer=spanner_instance_admin.GetInstanceRequest.serialize, + response_deserializer=spanner_instance_admin.Instance.deserialize, + ) + return self._stubs['get_instance'] + + @property + def create_instance(self) -> Callable[ + [spanner_instance_admin.CreateInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance method over gRPC. + + Creates an instance and begins preparing it to begin serving. + The returned long-running operation can be used to track the + progress of preparing the new instance. The instance name is + assigned by the caller. If the named instance already exists, + ``CreateInstance`` returns ``ALREADY_EXISTS``. + + Immediately upon completion of this request: + + - The instance is readable via the API, with all requested + attributes but no allocated resources. Its state is + ``CREATING``. + + Until completion of the returned operation: + + - Cancelling the operation renders the instance immediately + unreadable via the API. + - The instance can be deleted. + - All other attempts to modify the instance are rejected. + + Upon completion of the returned operation: + + - Billing for all successfully-allocated resources begins (some + types may have lower than the requested levels). + - Databases can be created in the instance. + - The instance's allocated resource levels are readable via the + API. + - The instance's state becomes ``READY``. + + The returned long-running operation will have a name of the + format ``<instance_name>/operations/<operation_id>`` and can be + used to track creation of the instance. The metadata field type + is + [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. + The response field type is + [Instance][google.spanner.admin.instance.v1.Instance], if + successful. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_instance' not in self._stubs: + self._stubs['create_instance'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance', + request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_instance'] + + @property + def update_instance(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance method over gRPC. + + Updates an instance, and begins allocating or releasing + resources as requested. The returned long-running operation can + be used to track the progress of updating the instance. If the + named instance does not exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance's + allocation has been requested, billing is based on the + newly-requested level.
+ + Until completion of the returned operation: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], + and begins restoring resources to their pre-request values. + The operation is guaranteed to succeed at undoing all resource + changes, after which point it terminates with a ``CANCELLED`` + status. + - All other attempts to modify the instance are rejected. + - Reading the instance via the API continues to give the + pre-request resource levels. + + Upon completion of the returned operation: + + - Billing begins for all successfully-allocated resources (some + types may have lower than the requested levels). + - All newly-reserved resources are available for serving the + instance's tables. + - The instance's new resource levels are readable via the API. + + The returned long-running operation will have a name of the + format ``<instance_name>/operations/<operation_id>`` and can be + used to track the instance modification. The metadata field type + is + [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. + The response field type is + [Instance][google.spanner.admin.instance.v1.Instance], if + successful. + + Authorization requires ``spanner.instances.update`` permission + on the resource + [name][google.spanner.admin.instance.v1.Instance.name]. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_instance' not in self._stubs: + self._stubs['update_instance'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', + request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_instance'] + + @property + def delete_instance(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes an instance. + + Immediately upon completion of the request: + + - Billing ceases for all of the instance's reserved resources. + + Soon afterward: + + - The instance and *all of its databases* immediately and + irrevocably disappear from the API. All data in the databases + is permanently deleted. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance' not in self._stubs: + self._stubs['delete_instance'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', + request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_instance'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy on an instance resource.
Replaces + any existing policy. + + Authorization requires ``spanner.instances.setIamPolicy`` on + [resource][google.iam.v1.SetIamPolicyRequest.resource]. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for an instance resource. Returns + an empty policy if an instance exists but does not have a policy + set. + + Authorization requires ``spanner.instances.getIamPolicy`` on + [resource][google.iam.v1.GetIamPolicyRequest.resource]. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns permissions that the caller has on the specified + instance resource. + + Attempting this RPC on a non-existent Cloud Spanner instance + resource will result in a NOT_FOUND error if the user has + ``spanner.instances.list`` permission on the containing Google + Cloud Project. Otherwise returns an empty set of permissions. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
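+        # As with the other stub properties on this transport, the callable
+        # below is built once on first access, cached in self._stubs, and
+        # reused on every later access.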
+ if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + @property + def get_instance_partition(self) -> Callable[ + [spanner_instance_admin.GetInstancePartitionRequest], + Awaitable[spanner_instance_admin.InstancePartition]]: + r"""Return a callable for the get instance partition method over gRPC. + + Gets information about a particular instance + partition. + + Returns: + Callable[[~.GetInstancePartitionRequest], + Awaitable[~.InstancePartition]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance_partition' not in self._stubs: + self._stubs['get_instance_partition'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition', + request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize, + response_deserializer=spanner_instance_admin.InstancePartition.deserialize, + ) + return self._stubs['get_instance_partition'] + + @property + def create_instance_partition(self) -> Callable[ + [spanner_instance_admin.CreateInstancePartitionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance partition method over gRPC. + + Creates an instance partition and begins preparing it to be + used. The returned long-running operation can be used to track + the progress of preparing the new instance partition. The + instance partition name is assigned by the caller. If the named + instance partition already exists, ``CreateInstancePartition`` + returns ``ALREADY_EXISTS``. + + Immediately upon completion of this request: + + - The instance partition is readable via the API, with all + requested attributes but no allocated resources. Its state is + ``CREATING``. + + Until completion of the returned operation: + + - Cancelling the operation renders the instance partition + immediately unreadable via the API. + - The instance partition can be deleted. + - All other attempts to modify the instance partition are + rejected. + + Upon completion of the returned operation: + + - Billing for all successfully-allocated resources begins (some + types may have lower than the requested levels). + - Databases can start using this instance partition. + - The instance partition's allocated resource levels are + readable via the API. + - The instance partition's state becomes ``READY``. + + The returned long-running operation will have a name of the + format ``<instance_partition_name>/operations/<operation_id>`` + and can be used to track creation of the instance partition. The + metadata field type is + [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. + The response field type is + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], + if successful. + + Returns: + Callable[[~.CreateInstancePartitionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_instance_partition' not in self._stubs: + self._stubs['create_instance_partition'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition', + request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_instance_partition'] + + @property + def delete_instance_partition(self) -> Callable[ + [spanner_instance_admin.DeleteInstancePartitionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete instance partition method over gRPC. + + Deletes an existing instance partition. Requires that the + instance partition is not used by any database or backup and is + not the default instance partition of an instance. + + Authorization requires ``spanner.instancePartitions.delete`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + Returns: + Callable[[~.DeleteInstancePartitionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance_partition' not in self._stubs: + self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition', + request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_instance_partition'] + + @property + def update_instance_partition(self) -> Callable[ + [spanner_instance_admin.UpdateInstancePartitionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance partition method over gRPC. + + Updates an instance partition, and begins allocating or + releasing resources as requested. The returned long-running + operation can be used to track the progress of updating the + instance partition. If the named instance partition does not + exist, returns ``NOT_FOUND``. + + Immediately upon completion of this request: + + - For resource types for which a decrease in the instance + partition's allocation has been requested, billing is based on + the newly-requested level. + + Until completion of the returned operation: + + - Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], + and begins restoring resources to their pre-request values. + The operation is guaranteed to succeed at undoing all resource + changes, after which point it terminates with a ``CANCELLED`` + status. + - All other attempts to modify the instance partition are + rejected. + - Reading the instance partition via the API continues to give + the pre-request resource levels. + + Upon completion of the returned operation: + + - Billing begins for all successfully-allocated resources (some + types may have lower than the requested levels). + - All newly-reserved resources are available for serving the + instance partition's tables. + - The instance partition's new resource levels are readable via + the API. 
+ + The returned long-running operation will have a name of the + format ``<instance_partition_name>/operations/<operation_id>`` + and can be used to track the instance partition modification. + The metadata field type is + [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. + The response field type is + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], + if successful. + + Authorization requires ``spanner.instancePartitions.update`` + permission on the resource + [name][google.spanner.admin.instance.v1.InstancePartition.name]. + + Returns: + Callable[[~.UpdateInstancePartitionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_instance_partition' not in self._stubs: + self._stubs['update_instance_partition'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition', + request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_instance_partition'] + + @property + def list_instance_partition_operations(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionOperationsRequest], + Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]]: + r"""Return a callable for the list instance partition + operations method over gRPC. + + Lists instance partition long-running operations in the given + instance. An instance partition operation has a name of the form + ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``. + The long-running operation metadata field type + ``metadata.type_url`` describes the type of the metadata. + Operations returned include those that have + completed/failed/canceled within the last 7 days, and pending + operations. Operations returned are ordered by + ``operation.metadata.value.start_time`` in descending order + starting from the most recently started operation. + + Authorization requires + ``spanner.instancePartitionOperations.list`` permission on the + resource + [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. + + Returns: + Callable[[~.ListInstancePartitionOperationsRequest], + Awaitable[~.ListInstancePartitionOperationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instance_partition_operations' not in self._stubs: + self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations', + request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize, + response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize, + ) + return self._stubs['list_instance_partition_operations'] + + @property + def move_instance(self) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the move instance method over gRPC.
+ + Moves an instance to the target instance configuration. You can + use the returned long-running operation to track the progress of + moving the instance. + + ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance + meets any of the following criteria: + + - Is undergoing a move to a different instance configuration + - Has backups + - Has an ongoing update + - Contains any CMEK-enabled databases + - Is a free trial instance + + While the operation is pending: + + - All other attempts to modify the instance, including changes + to its compute capacity, are rejected. + + - The following database and backup admin operations are + rejected: + + - ``DatabaseAdmin.CreateDatabase`` + - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if + default_leader is specified in the request.) + - ``DatabaseAdmin.RestoreDatabase`` + - ``DatabaseAdmin.CreateBackup`` + - ``DatabaseAdmin.CopyBackup`` + + - Both the source and target instance configurations are subject + to hourly compute and storage charges. + + - The instance might experience higher read-write latencies and + a higher transaction abort rate. However, moving an instance + doesn't cause any downtime. + + The returned long-running operation has a name of the format + ``<instance_name>/operations/<operation_id>`` and can be used to + track the move instance operation. The metadata field type is + [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. + The response field type is + [Instance][google.spanner.admin.instance.v1.Instance], if + successful. Cancelling the operation sets its metadata's + [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. + Cancellation is not immediate because it involves moving any + data previously moved to the target instance configuration back + to the original instance configuration. You can use this + operation to track the progress of the cancellation. Upon + successful completion of the cancellation, the operation + terminates with ``CANCELLED`` status. + + If not cancelled, upon completion of the returned operation: + + - The instance successfully moves to the target instance + configuration. + - You are billed for compute and storage in the target instance + configuration. + + Authorization requires the ``spanner.instances.update`` + permission on the resource + [instance][google.spanner.admin.instance.v1.Instance]. + + For more details, see `Move an + instance <https://cloud.google.com/spanner/docs/move-instance>`__. + + Returns: + Callable[[~.MoveInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
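+        # The callable below returns a google.longrunning Operation; as the
+        # docstring above notes, callers track the move (and any
+        # cancellation) through that operation rather than through this stub.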
+ if 'move_instance' not in self._stubs: + self._stubs['move_instance'] = self._logged_channel.unary_unary( + '/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance', + request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['move_instance'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instance_configs: self._wrap_method( + self.list_instance_configs, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.get_instance_config: self._wrap_method( + self.get_instance_config, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_instance_config: self._wrap_method( + self.create_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_config: self._wrap_method( + self.update_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_config: self._wrap_method( + self.delete_instance_config, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_config_operations: self._wrap_method( + self.list_instance_config_operations, + default_timeout=None, + client_info=client_info, + ), + self.list_instances: self._wrap_method( + self.list_instances, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.list_instance_partitions: self._wrap_method( + self.list_instance_partitions, + default_timeout=None, + client_info=client_info, + ), + self.get_instance: self._wrap_method( + self.get_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.create_instance: self._wrap_method( + self.create_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.update_instance: self._wrap_method( + self.update_instance, + default_timeout=3600.0, + client_info=client_info, + ), + self.delete_instance: self._wrap_method( + self.delete_instance, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=32.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=30.0, + client_info=client_info, + ), + self.get_instance_partition: self._wrap_method( + self.get_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.create_instance_partition: self._wrap_method( + self.create_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.delete_instance_partition: self._wrap_method( + self.delete_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.update_instance_partition: self._wrap_method( + self.update_instance_partition, + default_timeout=None, + client_info=client_info, + ), + self.list_instance_partition_operations: self._wrap_method( + self.list_instance_partition_operations, + default_timeout=None, + client_info=client_info, + ), + self.move_instance: self._wrap_method( + self.move_instance, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
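+        # The operations-mixin stubs here pass the raw protobuf
+        # SerializeToString/FromString handlers, unlike the service stubs
+        # above, which use the request/response types' serialize and
+        # deserialize wrappers.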
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'InstanceAdminGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py new file mode 100644 index 0000000000..1d9124d35d --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py @@ -0,0 +1,4462 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.api_core import operations_v1 + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseInstanceAdminRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InstanceAdminRestInterceptor: + """Interceptor for InstanceAdmin. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstanceAdminRestTransport. + + .. 
code-block:: python + class MyCustomInstanceAdminInterceptor(InstanceAdminRestInterceptor): + def pre_create_instance(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_create_instance_config(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_instance_config(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_create_instance_partition(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_create_instance_partition(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def pre_delete_instance_config(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def pre_delete_instance_partition(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def pre_get_iam_policy(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_get_instance_config(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_instance_config(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_get_instance_partition(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_get_instance_partition(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_instance_config_operations(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_instance_config_operations(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_instance_configs(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_instance_configs(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_instance_partition_operations(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_instance_partition_operations(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_instance_partitions(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_instance_partitions(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_move_instance(self, request, metadata):
+ logging.info(f"Received request: {request}") + return request, metadata + + def post_move_instance(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_update_instance_config(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_update_instance_config(self, response): + logging.info(f"Received response: {response}") + return response + + def pre_update_instance_partition(self, request, metadata): + logging.info(f"Received request: {request}") + return request, metadata + + def post_update_instance_partition(self, response): + logging.info(f"Received response: {response}") + return response + + transport = InstanceAdminRestTransport(interceptor=MyCustomInstanceAdminInterceptor()) + client = InstanceAdminClient(transport=transport) + + + """ + def pre_create_instance(self, request: spanner_instance_admin.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. + """ + return response + + def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`.
+ """ + return response, metadata + + def pre_create_instance_config(self, request: spanner_instance_admin.CreateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_create_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance_config + + DEPRECATED. Please use the `post_create_instance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_create_instance_config` interceptor runs + before the `post_create_instance_config_with_metadata` interceptor. + """ + return response + + def post_create_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_create_instance_config_with_metadata` + interceptor in new development instead of the `post_create_instance_config` interceptor. + When both interceptors are used, this `post_create_instance_config_with_metadata` interceptor runs after the + `post_create_instance_config` interceptor. The (possibly modified) response returned by + `post_create_instance_config` will be passed to + `post_create_instance_config_with_metadata`. + """ + return response, metadata + + def pre_create_instance_partition(self, request: spanner_instance_admin.CreateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_create_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance_partition + + DEPRECATED. Please use the `post_create_instance_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_create_instance_partition` interceptor runs + before the `post_create_instance_partition_with_metadata` interceptor. + """ + return response + + def post_create_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_create_instance_partition_with_metadata` + interceptor in new development instead of the `post_create_instance_partition` interceptor. + When both interceptors are used, this `post_create_instance_partition_with_metadata` interceptor runs after the + `post_create_instance_partition` interceptor. The (possibly modified) response returned by + `post_create_instance_partition` will be passed to + `post_create_instance_partition_with_metadata`. + """ + return response, metadata + + def pre_delete_instance(self, request: spanner_instance_admin.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def pre_delete_instance_config(self, request: spanner_instance_admin.DeleteInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def pre_delete_instance_partition(self, request: spanner_instance_admin.DeleteInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. 
The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_get_instance(self, request: spanner_instance_admin.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_instance(self, response: spanner_instance_admin.Instance) -> spanner_instance_admin.Instance: + """Post-rpc interceptor for get_instance + + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. + """ + return response + + def post_get_instance_with_metadata(self, response: spanner_instance_admin.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + + def pre_get_instance_config(self, request: spanner_instance_admin.GetInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_instance_config(self, response: spanner_instance_admin.InstanceConfig) -> spanner_instance_admin.InstanceConfig: + """Post-rpc interceptor for get_instance_config + + DEPRECATED. Please use the `post_get_instance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_instance_config` interceptor runs + before the `post_get_instance_config_with_metadata` interceptor. + """ + return response + + def post_get_instance_config_with_metadata(self, response: spanner_instance_admin.InstanceConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstanceConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_get_instance_config_with_metadata` + interceptor in new development instead of the `post_get_instance_config` interceptor. + When both interceptors are used, this `post_get_instance_config_with_metadata` interceptor runs after the + `post_get_instance_config` interceptor. The (possibly modified) response returned by + `post_get_instance_config` will be passed to + `post_get_instance_config_with_metadata`. + """ + return response, metadata + + def pre_get_instance_partition(self, request: spanner_instance_admin.GetInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_instance_partition(self, response: spanner_instance_admin.InstancePartition) -> spanner_instance_admin.InstancePartition: + """Post-rpc interceptor for get_instance_partition + + DEPRECATED. Please use the `post_get_instance_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_get_instance_partition` interceptor runs + before the `post_get_instance_partition_with_metadata` interceptor. + """ + return response + + def post_get_instance_partition_with_metadata(self, response: spanner_instance_admin.InstancePartition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstancePartition, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_get_instance_partition_with_metadata` + interceptor in new development instead of the `post_get_instance_partition` interceptor. + When both interceptors are used, this `post_get_instance_partition_with_metadata` interceptor runs after the + `post_get_instance_partition` interceptor. The (possibly modified) response returned by + `post_get_instance_partition` will be passed to + `post_get_instance_partition_with_metadata`. + """ + return response, metadata + + def pre_list_instance_config_operations(self, request: spanner_instance_admin.ListInstanceConfigOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_config_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_config_operations(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: + """Post-rpc interceptor for list_instance_config_operations + + DEPRECATED. Please use the `post_list_instance_config_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. 
This `post_list_instance_config_operations` interceptor runs + before the `post_list_instance_config_operations_with_metadata` interceptor. + """ + return response + + def post_list_instance_config_operations_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_config_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_config_operations_with_metadata` + interceptor in new development instead of the `post_list_instance_config_operations` interceptor. + When both interceptors are used, this `post_list_instance_config_operations_with_metadata` interceptor runs after the + `post_list_instance_config_operations` interceptor. The (possibly modified) response returned by + `post_list_instance_config_operations` will be passed to + `post_list_instance_config_operations_with_metadata`. + """ + return response, metadata + + def pre_list_instance_configs(self, request: spanner_instance_admin.ListInstanceConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_configs(self, response: spanner_instance_admin.ListInstanceConfigsResponse) -> spanner_instance_admin.ListInstanceConfigsResponse: + """Post-rpc interceptor for list_instance_configs + + DEPRECATED. Please use the `post_list_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instance_configs` interceptor runs + before the `post_list_instance_configs_with_metadata` interceptor. + """ + return response + + def post_list_instance_configs_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_configs_with_metadata` + interceptor in new development instead of the `post_list_instance_configs` interceptor. + When both interceptors are used, this `post_list_instance_configs_with_metadata` interceptor runs after the + `post_list_instance_configs` interceptor. The (possibly modified) response returned by + `post_list_instance_configs` will be passed to + `post_list_instance_configs_with_metadata`. 
+ """ + return response, metadata + + def pre_list_instance_partition_operations(self, request: spanner_instance_admin.ListInstancePartitionOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_partition_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_partition_operations(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: + """Post-rpc interceptor for list_instance_partition_operations + + DEPRECATED. Please use the `post_list_instance_partition_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instance_partition_operations` interceptor runs + before the `post_list_instance_partition_operations_with_metadata` interceptor. + """ + return response + + def post_list_instance_partition_operations_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_partition_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_partition_operations_with_metadata` + interceptor in new development instead of the `post_list_instance_partition_operations` interceptor. + When both interceptors are used, this `post_list_instance_partition_operations_with_metadata` interceptor runs after the + `post_list_instance_partition_operations` interceptor. The (possibly modified) response returned by + `post_list_instance_partition_operations` will be passed to + `post_list_instance_partition_operations_with_metadata`. + """ + return response, metadata + + def pre_list_instance_partitions(self, request: spanner_instance_admin.ListInstancePartitionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instance_partitions + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instance_partitions(self, response: spanner_instance_admin.ListInstancePartitionsResponse) -> spanner_instance_admin.ListInstancePartitionsResponse: + """Post-rpc interceptor for list_instance_partitions + + DEPRECATED. Please use the `post_list_instance_partitions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instance_partitions` interceptor runs + before the `post_list_instance_partitions_with_metadata` interceptor. 
+ """ + return response + + def post_list_instance_partitions_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instance_partitions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instance_partitions_with_metadata` + interceptor in new development instead of the `post_list_instance_partitions` interceptor. + When both interceptors are used, this `post_list_instance_partitions_with_metadata` interceptor runs after the + `post_list_instance_partitions` interceptor. The (possibly modified) response returned by + `post_list_instance_partitions` will be passed to + `post_list_instance_partitions_with_metadata`. + """ + return response, metadata + + def pre_list_instances(self, request: spanner_instance_admin.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_instances(self, response: spanner_instance_admin.ListInstancesResponse) -> spanner_instance_admin.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. + """ + return response + + def post_list_instances_with_metadata(self, response: spanner_instance_admin.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + + def pre_move_instance(self, request: spanner_instance_admin.MoveInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.MoveInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for move_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_move_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for move_instance + + DEPRECATED. 
Please use the `post_move_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_move_instance` interceptor runs + before the `post_move_instance_with_metadata` interceptor. + """ + return response + + def post_move_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_move_instance_with_metadata` + interceptor in new development instead of the `post_move_instance` interceptor. + When both interceptors are used, this `post_move_instance_with_metadata` interceptor runs after the + `post_move_instance` interceptor. The (possibly modified) response returned by + `post_move_instance` will be passed to + `post_move_instance_with_metadata`. + """ + return response, metadata + + def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + + def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. 
+ """ + return request, metadata + + def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + def pre_update_instance(self, request: spanner_instance_admin.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. + """ + return response + + def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_update_instance_config(self, request: spanner_instance_admin.UpdateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_update_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance_config + + DEPRECATED. Please use the `post_update_instance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_update_instance_config` interceptor runs + before the `post_update_instance_config_with_metadata` interceptor. + """ + return response + + def post_update_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. + + We recommend only using this `post_update_instance_config_with_metadata` + interceptor in new development instead of the `post_update_instance_config` interceptor. + When both interceptors are used, this `post_update_instance_config_with_metadata` interceptor runs after the + `post_update_instance_config` interceptor. The (possibly modified) response returned by + `post_update_instance_config` will be passed to + `post_update_instance_config_with_metadata`. + """ + return response, metadata + + def pre_update_instance_partition(self, request: spanner_instance_admin.UpdateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance_partition + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_update_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance_partition + + DEPRECATED. Please use the `post_update_instance_partition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. This `post_update_instance_partition` interceptor runs + before the `post_update_instance_partition_with_metadata` interceptor. + """ + return response + + def post_update_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance_partition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_update_instance_partition_with_metadata` + interceptor in new development instead of the `post_update_instance_partition` interceptor. + When both interceptors are used, this `post_update_instance_partition_with_metadata` interceptor runs after the + `post_update_instance_partition` interceptor. The (possibly modified) response returned by + `post_update_instance_partition` will be passed to + `post_update_instance_partition_with_metadata`. + """ + return response, metadata + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceAdmin server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the InstanceAdmin server but before + it is returned to user code. 
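The pre/post/`_with_metadata` hooks above are the extension points of this REST transport. As a minimal sketch of how they are meant to be wired together — import paths assume the public `google.cloud.spanner_admin_instance_v1` package layout, and the header name is purely illustrative:

```python
# Minimal sketch, not a documented quickstart: a custom interceptor
# handed to the REST transport via its ``interceptor`` kwarg.
from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.rest import (
    InstanceAdminRestInterceptor,
    InstanceAdminRestTransport,
)


class AuditingInterceptor(InstanceAdminRestInterceptor):
    def pre_get_instance(self, request, metadata):
        # Runs before the request is sent; append an (illustrative) header.
        return request, list(metadata) + [("x-audit-origin", "demo")]

    def post_get_instance_with_metadata(self, response, metadata):
        # Preferred over post_get_instance: also sees the response headers.
        print("instance:", response.name, "headers:", dict(metadata))
        return response, metadata


# Credentials fall back to Application Default Credentials when omitted.
transport = InstanceAdminRestTransport(interceptor=AuditingInterceptor())
client = InstanceAdminClient(transport=transport)
```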
+ """ + return response + + +@dataclasses.dataclass +class InstanceAdminRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstanceAdminRestInterceptor + + +class InstanceAdminRestTransport(_BaseInstanceAdminRestTransport): + """REST backend synchronous transport for InstanceAdmin. + + Cloud Spanner Instance Admin API + + The Cloud Spanner Instance Admin API can be used to create, + delete, modify and list instances. Instances are dedicated Cloud + Spanner serving and storage resources to be used by Cloud + Spanner databases. + + Each instance has a "configuration", which dictates where the + serving resources for the Cloud Spanner instance are located + (e.g., US-central, Europe). Configurations are created by Google + based on resource availability. + + Cloud Spanner billing is based on the instances that exist and + their sizes. After an instance exists, there are no additional + per-database or per-operation charges for use of the instance + (though there may be additional network bandwidth charges). + Instances offer isolation: problems with databases in one + instance will not affect other instances. However, within an + instance databases can affect each other. For example, if one + database in an instance receives a lot of requests and consumes + most of the instance resources, fewer resources are available + for other databases in that instance, and their performance may + suffer. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[InstanceAdminRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstanceAdminRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ], + 
'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateInstance(_BaseInstanceAdminRestTransport._BaseCreateInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CreateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.spanner_instance_admin.CreateInstanceRequest): + The request object. The request for + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
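The `http_options` table above maps the `google.longrunning.Operations` RPCs onto Spanner's REST paths; it is what lets a long-running method such as `create_instance` hand back a pollable operation. A sketch of the caller-side flow, assuming a `client` already built on this transport and placeholder resource names:

```python
# Sketch: create_instance returns a google.api_core.operation.Operation
# that polls through the operations client configured above.
from google.cloud.spanner_admin_instance_v1 import types

op = client.create_instance(
    parent="projects/my-project",  # placeholder project
    instance_id="demo-instance",
    instance=types.Instance(
        config="projects/my-project/instanceConfigs/regional-us-central1",
        display_name="Demo",
        node_count=1,
    ),
)
instance = op.result(timeout=300)  # blocks until the LRO completes

# The raw operations client is also reachable for manual inspection.
raw_op = client.transport.operations_client.get_operation(op.operation.name)
```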
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_http_options() + + request, metadata = self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInstanceConfig(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CreateInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.CreateInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.CreateInstanceConfigRequest): + The request object. The request for + [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_create_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CreateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_config", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstanceConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateInstancePartition(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CreateInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.CreateInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.CreateInstancePartitionRequest): + The request object. The request for + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_create_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CreateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_partition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CreateInstancePartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInstance(_BaseInstanceAdminRestTransport._BaseDeleteInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.DeleteInstanceRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete instance method over HTTP. + + Args: + request (~.spanner_instance_admin.DeleteInstanceRequest): + The request object. The request for + [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_http_options() + + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInstanceConfig(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.DeleteInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.DeleteInstanceConfigRequest): + The request object. The request for + [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. 
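Any response with status >= 400 is raised through `core_exceptions.from_http_response`, which selects the matching `GoogleAPICallError` subclass. A sketch of catching the mapped exceptions around a delete, assuming an existing `client` and a placeholder resource name:

```python
from google.api_core import exceptions as core_exceptions

try:
    client.delete_instance(name="projects/my-project/instances/missing")
except core_exceptions.NotFound:            # HTTP 404
    pass                                    # already deleted; treat as success
except core_exceptions.PermissionDenied:    # HTTP 403
    raise
except core_exceptions.GoogleAPICallError as exc:
    print("unexpected API error:", exc.code, exc.message)
```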
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_delete_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteInstancePartition(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.DeleteInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ): + r"""Call the delete instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.DeleteInstancePartitionRequest): + The request object. The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_delete_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetIamPolicy(_BaseInstanceAdminRestTransport._BaseGetIamPolicy, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. 
A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetIamPolicy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_iam_policy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstance(_BaseInstanceAdminRestTransport._BaseGetInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.spanner_instance_admin.GetInstanceRequest): + The request object. The request for + [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.Instance: + An isolated set of Cloud Spanner + resources on which databases can be + hosted. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetInstance._get_http_options() + + request, metadata = self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.Instance() + pb_resp = spanner_instance_admin.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.Instance.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstanceConfig(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.GetInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.InstanceConfig: + 
r"""Call the get instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.GetInstanceConfigRequest): + The request object. The request for + [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.InstanceConfig: + A possible configuration for a Cloud + Spanner instance. Configurations define + the geographic placement of nodes and + their replication. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_get_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.InstanceConfig() + pb_resp = spanner_instance_admin.InstanceConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.InstanceConfig.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_config", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstanceConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstancePartition(_BaseInstanceAdminRestTransport._BaseGetInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.GetInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.InstancePartition: + r"""Call the get instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.GetInstancePartitionRequest): + The request object. The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.InstancePartition: + An isolated set of Cloud Spanner + resources that databases can define + placements on. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_get_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.InstancePartition() + pb_resp = spanner_instance_admin.InstancePartition.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.InstancePartition.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_partition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetInstancePartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstanceConfigOperations(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstanceConfigOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstanceConfigOperationsRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: + r"""Call the list instance config + operations method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstanceConfigOperationsRequest): + The request object. The request for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstanceConfigOperationsResponse: + The response for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_config_operations(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstanceConfigOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
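+            # Editor's note -- an illustrative sketch, not generated code. At the
+            # client layer this response is wrapped in a pager that re-invokes
+            # this method with the returned next_page_token, so callers iterate
+            # transparently. Hypothetical parent name:
+            #
+            #     for op in client.list_instance_config_operations(parent="projects/p"):
+            #         print(op.name)  # google.longrunning.Operation messages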
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstanceConfigOperationsResponse() + pb_resp = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_config_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_config_operations_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_config_operations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstanceConfigs(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstanceConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstanceConfigsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstanceConfigsResponse: + r"""Call the list instance configs method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstanceConfigsRequest): + The request object. The request for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstanceConfigsResponse: + The response for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_configs(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigs", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstanceConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstanceConfigsResponse() + pb_resp = spanner_instance_admin.ListInstanceConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_configs_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstanceConfigsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_configs", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstanceConfigs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstancePartitionOperations(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstancePartitionOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: 
spanner_instance_admin.ListInstancePartitionOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: + r"""Call the list instance partition + operations method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstancePartitionOperationsRequest): + The request object. The request for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstancePartitionOperationsResponse: + The response for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_partition_operations(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitionOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitionOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstancePartitionOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
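+            # Editor's note -- an illustrative sketch, not generated code. The
+            # lines below rely on json_format.Parse filling the raw protobuf
+            # message in place from the JSON body, tolerating unknown fields:
+            #
+            #     from google.protobuf import json_format
+            #     wrapper = spanner_instance_admin.ListInstancePartitionOperationsResponse()
+            #     msg = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(wrapper)
+            #     json_format.Parse('{"nextPageToken": "abc"}', msg,
+            #                       ignore_unknown_fields=True)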
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstancePartitionOperationsResponse() + pb_resp = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_partition_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_partition_operations_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partition_operations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitionOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstancePartitions(_BaseInstanceAdminRestTransport._BaseListInstancePartitions, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstancePartitions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstancePartitionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstancePartitionsResponse: + r"""Call the list instance partitions method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstancePartitionsRequest): + The request object. The request for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstancePartitionsResponse: + The response for + [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_http_options() + + request, metadata = self._interceptor.pre_list_instance_partitions(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstancePartitions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstancePartitionsResponse() + pb_resp = spanner_instance_admin.ListInstancePartitionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instance_partitions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instance_partitions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstancePartitionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partitions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstancePartitions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstances(_BaseInstanceAdminRestTransport._BaseListInstances, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListInstances") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: spanner_instance_admin.ListInstancesRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> spanner_instance_admin.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.spanner_instance_admin.ListInstancesRequest): + The request object. The request for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.spanner_instance_admin.ListInstancesResponse: + The response for + [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListInstances._get_http_options() + + request, metadata = self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstances", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstances", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
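+            # Editor's note -- an illustrative sketch, not generated code.
+            # Callers normally reach this path through the client; `filter` is
+            # the server-side expression documented on ListInstancesRequest.
+            # Hypothetical project name:
+            #
+            #     request = spanner_instance_admin.ListInstancesRequest(
+            #         parent="projects/p", filter="labels.env:dev")
+            #     for instance in client.list_instances(request=request):
+            #         print(instance.name)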
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = spanner_instance_admin.ListInstancesResponse() + pb_resp = spanner_instance_admin.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = spanner_instance_admin.ListInstancesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instances", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListInstances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _MoveInstance(_BaseInstanceAdminRestTransport._BaseMoveInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.MoveInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.MoveInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the move instance method over HTTP. + + Args: + request (~.spanner_instance_admin.MoveInstanceRequest): + The request object. The request for + [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_http_options() + + request, metadata = self._interceptor.pre_move_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.MoveInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "MoveInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._MoveInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_move_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.move_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "MoveInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy(_BaseInstanceAdminRestTransport._BaseSetIamPolicy, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
policy_pb2.Policy:
+        r"""Call the set iam policy method over HTTP.
+
+        Args:
+            request (~.iam_policy_pb2.SetIamPolicyRequest):
+                The request object. Request message for ``SetIamPolicy`` method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            ~.policy_pb2.Policy:
+                An Identity and Access Management (IAM) policy, which
+                specifies access controls for Google Cloud resources.
+
+        A ``Policy`` is a collection of ``bindings``. A
+        ``binding`` binds one or more ``members``, or
+        principals, to a single ``role``. Principals can be user
+        accounts, service accounts, Google groups, and domains
+        (such as G Suite). A ``role`` is a named list of
+        permissions; each ``role`` can be an IAM predefined role
+        or a user-created custom role.
+
+        For some types of Google Cloud resources, a ``binding``
+        can also specify a ``condition``, which is a logical
+        expression that allows access to a resource only if the
+        expression evaluates to ``true``. A condition can add
+        constraints based on attributes of the request, the
+        resource, or both. To learn which resources support
+        conditions in their IAM policies, see the `IAM
+        documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__.
+
+        **JSON example:**
+
+        ::
+
+           {
+             "bindings": [
+               {
+                 "role": "roles/resourcemanager.organizationAdmin",
+                 "members": [
+                   "user:mike@example.com",
+                   "group:admins@example.com",
+                   "domain:google.com",
+                   "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                 ]
+               },
+               {
+                 "role": "roles/resourcemanager.organizationViewer",
+                 "members": [
+                   "user:eve@example.com"
+                 ],
+                 "condition": {
+                   "title": "expirable access",
+                   "description": "Does not grant access after Sep 2020",
+                   "expression": "request.time <
+                   timestamp('2020-10-01T00:00:00.000Z')",
+                 }
+               }
+             ],
+             "etag": "BwWWja0YfJA=",
+             "version": 3
+           }
+
+        **YAML example:**
+
+        ::
+
+           bindings:
+           - members:
+             - user:mike@example.com
+             - group:admins@example.com
+             - domain:google.com
+             - serviceAccount:my-project-id@appspot.gserviceaccount.com
+             role: roles/resourcemanager.organizationAdmin
+           - members:
+             - user:eve@example.com
+             role: roles/resourcemanager.organizationViewer
+             condition:
+               title: expirable access
+               description: Does not grant access after Sep 2020
+               expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+           etag: BwWWja0YfJA=
+           version: 3
+
+        For a description of IAM and its features, see the `IAM
+        documentation <https://cloud.google.com/iam/docs/>`__.
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.SetIamPolicy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = policy_pb2.Policy() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.set_iam_policy", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions(_BaseInstanceAdminRestTransport._BaseTestIamPermissions, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.TestIamPermissions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
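+            # Editor's note -- an illustrative sketch, not generated code. A
+            # typical client-level use checks which of the supplied permissions
+            # the caller actually holds; hypothetical resource name:
+            #
+            #     resp = client.test_iam_permissions(
+            #         resource="projects/p/instances/i",
+            #         permissions=["spanner.instances.get", "spanner.instances.update"])
+            #     granted = set(resp.permissions)  # subset the caller holds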
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.test_iam_permissions", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInstance(_BaseInstanceAdminRestTransport._BaseUpdateInstance, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.UpdateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.spanner_instance_admin.UpdateInstanceRequest): + The request object. The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_http_options() + + request, metadata = self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInstanceConfig(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.UpdateInstanceConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.UpdateInstanceConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance config method over HTTP. + + Args: + request (~.spanner_instance_admin.UpdateInstanceConfigRequest): + The request object. The request for + [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_http_options() + + request, metadata = self._interceptor.pre_update_instance_config(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstanceConfig", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstanceConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._UpdateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_instance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_config_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_config", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstanceConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInstancePartition(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.UpdateInstancePartition") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: spanner_instance_admin.UpdateInstancePartitionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance partition method over HTTP. + + Args: + request (~.spanner_instance_admin.UpdateInstancePartitionRequest): + The request object. The request for + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
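+
+                Example (an illustrative sketch only; the partition name,
+                node count, and field mask below are hypothetical)::
+
+                    from google.cloud import spanner_admin_instance_v1
+
+                    client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")
+                    request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
+                        instance_partition={
+                            "name": "projects/my-project/instances/my-instance/instancePartitions/my-partition",
+                            "node_count": 2,
+                        },
+                        field_mask={"paths": ["node_count"]},
+                    )
+                    operation = client.update_instance_partition(request=request)
+                    # Block until the long-running operation completes.
+                    partition = operation.result()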
+ + """ + + http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_http_options() + + request, metadata = self._interceptor.pre_update_instance_partition(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_transcoded_request(http_options, request) + + body = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstancePartition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstancePartition", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._UpdateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_instance_partition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_partition_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_partition", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "UpdateInstancePartition", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_instance(self) -> Callable[ + [spanner_instance_admin.CreateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance_config(self) -> Callable[ + [spanner_instance_admin.CreateInstanceConfigRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_instance_partition(self) -> Callable[ + [spanner_instance_admin.CreateInstancePartitionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance_config(self) -> Callable[ + [spanner_instance_admin.DeleteInstanceConfigRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance_partition(self) -> Callable[ + [spanner_instance_admin.DeleteInstancePartitionRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [spanner_instance_admin.GetInstanceRequest], + spanner_instance_admin.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance_config(self) -> Callable[ + [spanner_instance_admin.GetInstanceConfigRequest], + spanner_instance_admin.InstanceConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance_partition(self) -> Callable[ + [spanner_instance_admin.GetInstancePartitionRequest], + spanner_instance_admin.InstancePartition]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_config_operations(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigOperationsRequest], + spanner_instance_admin.ListInstanceConfigOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListInstanceConfigOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_configs(self) -> Callable[ + [spanner_instance_admin.ListInstanceConfigsRequest], + spanner_instance_admin.ListInstanceConfigsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_partition_operations(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionOperationsRequest], + spanner_instance_admin.ListInstancePartitionOperationsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstancePartitionOperations(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instance_partitions(self) -> Callable[ + [spanner_instance_admin.ListInstancePartitionsRequest], + spanner_instance_admin.ListInstancePartitionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstancePartitions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [spanner_instance_admin.ListInstancesRequest], + spanner_instance_admin.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def move_instance(self) -> Callable[ + [spanner_instance_admin.MoveInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._MoveInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance_config(self) -> Callable[ + [spanner_instance_admin.UpdateInstanceConfigRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance_partition(self) -> Callable[ + [spanner_instance_admin.UpdateInstancePartitionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstancePartition(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseInstanceAdminRestTransport._BaseCancelOperation, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
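+
+                Example (an illustrative sketch only; the operation name
+                below is hypothetical, and the same mixin is exposed as
+                ``cancel_operation`` on a public ``InstanceAdminClient``
+                instance, here called ``client``)::
+
+                    from google.longrunning import operations_pb2
+
+                    client.cancel_operation(
+                        operations_pb2.CancelOperationRequest(
+                            name="projects/my-project/instances/my-instance/operations/my-operation",
+                        )
+                    )
+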
+ """ + + http_options = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CancelOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseInstanceAdminRestTransport._BaseDeleteOperation, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseInstanceAdminRestTransport._BaseGetOperation, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseInstanceAdminRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminAsyncClient.GetOperation", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseInstanceAdminRestTransport._BaseListOperations, InstanceAdminRestStub): + def __hash__(self): + return hash("InstanceAdminRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseInstanceAdminRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseInstanceAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseInstanceAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstanceAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.ListOperations", + extra = { + "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'InstanceAdminRestTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py new file mode 100644 index 0000000000..ef9a29eaf8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py @@ -0,0 +1,1152 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin +from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseInstanceAdminRestTransport(InstanceAdminTransport): + """Base REST backend transport for InstanceAdmin. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it.
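+
+    For example, each stub defined below declares its HTTP mapping and lets
+    ``path_template.transcode`` expand a protobuf request into a concrete
+    URI (a minimal sketch, assuming a ``pb_request`` whose ``parent`` field
+    is set to the hypothetical value below)::
+
+        options = [{'method': 'post', 'uri': '/v1/{parent=projects/*}/instances', 'body': '*'}]
+        transcoded = path_template.transcode(options, pb_request)
+        # With parent="projects/my-project", transcoded['uri'] is
+        # '/v1/projects/my-project/instances'.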
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'spanner.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'spanner.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*}/instances', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateInstanceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in
message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*}/instanceConfigs', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.CreateInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.CreateInstancePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInstanceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.DeleteInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.DeleteInstancePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*}:getIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + 
transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstanceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.GetInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.GetInstancePartitionRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstanceConfigOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*}/instanceConfigOperations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstanceConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*}/instanceConfigs', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstanceConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstancePartitionOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitionOperations', + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstancePartitions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstancePartitionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListInstances: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*}/instances', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseMoveInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v1/{name=projects/*/instances/*}:move', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.MoveInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseMoveInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*}:setIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/instances/*}:testIamPermissions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + 
query_params.update(_BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance.name=projects/*/instances/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstanceConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance_config.name=projects/*/instanceConfigs/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.UpdateInstanceConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateInstancePartition: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': 
'/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = spanner_instance_admin.UpdateInstancePartitionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=True + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=True, + )) + query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', + }, + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', + }, + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseInstanceAdminRestTransport', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py new file mode 100644 index 0000000000..385c06fb99 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
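+#
+# Illustrative usage (a sketch; the resource name below is hypothetical):
+# the names re-exported by this module are plain proto-plus messages and
+# can be constructed directly, e.g.
+#
+#     from google.cloud.spanner_admin_instance_v1.types import Instance
+#     instance = Instance(name="projects/my-project/instances/my-instance")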
+# +from .common import ( + OperationProgress, + ReplicaSelection, + FulfillmentPeriod, +) +from .spanner_instance_admin import ( + AutoscalingConfig, + CreateInstanceConfigMetadata, + CreateInstanceConfigRequest, + CreateInstanceMetadata, + CreateInstancePartitionMetadata, + CreateInstancePartitionRequest, + CreateInstanceRequest, + DeleteInstanceConfigRequest, + DeleteInstancePartitionRequest, + DeleteInstanceRequest, + FreeInstanceMetadata, + GetInstanceConfigRequest, + GetInstancePartitionRequest, + GetInstanceRequest, + Instance, + InstanceConfig, + InstancePartition, + ListInstanceConfigOperationsRequest, + ListInstanceConfigOperationsResponse, + ListInstanceConfigsRequest, + ListInstanceConfigsResponse, + ListInstancePartitionOperationsRequest, + ListInstancePartitionOperationsResponse, + ListInstancePartitionsRequest, + ListInstancePartitionsResponse, + ListInstancesRequest, + ListInstancesResponse, + MoveInstanceMetadata, + MoveInstanceRequest, + MoveInstanceResponse, + ReplicaComputeCapacity, + ReplicaInfo, + UpdateInstanceConfigMetadata, + UpdateInstanceConfigRequest, + UpdateInstanceMetadata, + UpdateInstancePartitionMetadata, + UpdateInstancePartitionRequest, + UpdateInstanceRequest, +) + +__all__ = ( + 'OperationProgress', + 'ReplicaSelection', + 'FulfillmentPeriod', + 'AutoscalingConfig', + 'CreateInstanceConfigMetadata', + 'CreateInstanceConfigRequest', + 'CreateInstanceMetadata', + 'CreateInstancePartitionMetadata', + 'CreateInstancePartitionRequest', + 'CreateInstanceRequest', + 'DeleteInstanceConfigRequest', + 'DeleteInstancePartitionRequest', + 'DeleteInstanceRequest', + 'FreeInstanceMetadata', + 'GetInstanceConfigRequest', + 'GetInstancePartitionRequest', + 'GetInstanceRequest', + 'Instance', + 'InstanceConfig', + 'InstancePartition', + 'ListInstanceConfigOperationsRequest', + 'ListInstanceConfigOperationsResponse', + 'ListInstanceConfigsRequest', + 'ListInstanceConfigsResponse', + 'ListInstancePartitionOperationsRequest', + 'ListInstancePartitionOperationsResponse', + 'ListInstancePartitionsRequest', + 'ListInstancePartitionsResponse', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'MoveInstanceMetadata', + 'MoveInstanceRequest', + 'MoveInstanceResponse', + 'ReplicaComputeCapacity', + 'ReplicaInfo', + 'UpdateInstanceConfigMetadata', + 'UpdateInstanceConfigRequest', + 'UpdateInstanceMetadata', + 'UpdateInstancePartitionMetadata', + 'UpdateInstancePartitionRequest', + 'UpdateInstanceRequest', +) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py new file mode 100644 index 0000000000..5bc1c6b158 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
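+#
+# Illustrative usage (a sketch): ``metadata`` below stands for any
+# long-running operation metadata message in this package that carries an
+# ``OperationProgress`` (an assumption about the caller's context):
+#
+#     progress = metadata.progress
+#     if progress.progress_percent == 100 and progress.end_time:
+#         pass  # the operation finished (successfully or with an error)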
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.spanner.admin.instance.v1',
+    manifest={
+        'FulfillmentPeriod',
+        'OperationProgress',
+        'ReplicaSelection',
+    },
+)
+
+
+class FulfillmentPeriod(proto.Enum):
+    r"""Indicates the expected fulfillment period of an operation.
+
+    Values:
+        FULFILLMENT_PERIOD_UNSPECIFIED (0):
+            Not specified.
+        FULFILLMENT_PERIOD_NORMAL (1):
+            Normal fulfillment period. The operation is
+            expected to complete within minutes.
+        FULFILLMENT_PERIOD_EXTENDED (2):
+            Extended fulfillment period. It can take up
+            to an hour for the operation to complete.
+    """
+    FULFILLMENT_PERIOD_UNSPECIFIED = 0
+    FULFILLMENT_PERIOD_NORMAL = 1
+    FULFILLMENT_PERIOD_EXTENDED = 2
+
+
+class OperationProgress(proto.Message):
+    r"""Encapsulates progress-related information for Cloud Spanner
+    long-running instance operations.
+
+    Attributes:
+        progress_percent (int):
+            Percent completion of the operation.
+            Values are between 0 and 100 inclusive.
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time the request was received.
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            If set, the time at which this operation
+            failed or was completed successfully.
+    """
+
+    progress_percent: int = proto.Field(
+        proto.INT32,
+        number=1,
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    end_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ReplicaSelection(proto.Message):
+    r"""ReplicaSelection identifies replicas with common properties.
+
+    Attributes:
+        location (str):
+            Required. Name of the location of the
+            replicas (e.g., "us-central1").
+    """
+
+    location: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
new file mode 100644
index 0000000000..d73305b19f
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
@@ -0,0 +1,2377 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
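+#
+# Illustrative usage (a sketch; the project ID is hypothetical): the request
+# messages defined below are plain proto-plus types and can be built directly,
+# e.g.
+#
+#     request = ListInstancesRequest(parent="projects/my-project", page_size=50)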
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.spanner_admin_instance_v1.types import common +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.spanner.admin.instance.v1', + manifest={ + 'ReplicaInfo', + 'InstanceConfig', + 'ReplicaComputeCapacity', + 'AutoscalingConfig', + 'Instance', + 'ListInstanceConfigsRequest', + 'ListInstanceConfigsResponse', + 'GetInstanceConfigRequest', + 'CreateInstanceConfigRequest', + 'UpdateInstanceConfigRequest', + 'DeleteInstanceConfigRequest', + 'ListInstanceConfigOperationsRequest', + 'ListInstanceConfigOperationsResponse', + 'GetInstanceRequest', + 'CreateInstanceRequest', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'UpdateInstanceRequest', + 'DeleteInstanceRequest', + 'CreateInstanceMetadata', + 'UpdateInstanceMetadata', + 'FreeInstanceMetadata', + 'CreateInstanceConfigMetadata', + 'UpdateInstanceConfigMetadata', + 'InstancePartition', + 'CreateInstancePartitionMetadata', + 'CreateInstancePartitionRequest', + 'DeleteInstancePartitionRequest', + 'GetInstancePartitionRequest', + 'UpdateInstancePartitionRequest', + 'UpdateInstancePartitionMetadata', + 'ListInstancePartitionsRequest', + 'ListInstancePartitionsResponse', + 'ListInstancePartitionOperationsRequest', + 'ListInstancePartitionOperationsResponse', + 'MoveInstanceRequest', + 'MoveInstanceResponse', + 'MoveInstanceMetadata', + }, +) + + +class ReplicaInfo(proto.Message): + r""" + + Attributes: + location (str): + The location of the serving resources, e.g., + "us-central1". + type_ (google.cloud.spanner_admin_instance_v1.types.ReplicaInfo.ReplicaType): + The type of replica. + default_leader_location (bool): + If true, this location is designated as the default leader + location where leader replicas are placed. See the `region + types + documentation `__ + for more details. + """ + class ReplicaType(proto.Enum): + r"""Indicates the type of replica. See the `replica types + documentation `__ + for more details. + + Values: + TYPE_UNSPECIFIED (0): + Not specified. + READ_WRITE (1): + Read-write replicas support both reads and writes. These + replicas: + + - Maintain a full copy of your data. + - Serve reads. + - Can vote whether to commit a write. + - Participate in leadership election. + - Are eligible to become a leader. + READ_ONLY (2): + Read-only replicas only support reads (not writes). + Read-only replicas: + + - Maintain a full copy of your data. + - Serve reads. + - Do not participate in voting to commit writes. + - Are not eligible to become a leader. + WITNESS (3): + Witness replicas don't support reads but do participate in + voting to commit writes. Witness replicas: + + - Do not maintain a full copy of data. + - Do not serve reads. + - Vote whether to commit writes. + - Participate in leader election but are not eligible to + become leader. + """ + TYPE_UNSPECIFIED = 0 + READ_WRITE = 1 + READ_ONLY = 2 + WITNESS = 3 + + location: str = proto.Field( + proto.STRING, + number=1, + ) + type_: ReplicaType = proto.Field( + proto.ENUM, + number=2, + enum=ReplicaType, + ) + default_leader_location: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class InstanceConfig(proto.Message): + r"""A possible configuration for a Cloud Spanner instance. 
+    Configurations define the geographic placement of nodes and
+    their replication.
+
+    Attributes:
+        name (str):
+            A unique identifier for the instance configuration. Values
+            are of the form
+            ``projects/<project>/instanceConfigs/[a-z][-a-z0-9]*``.
+
+            User instance configuration must start with ``custom-``.
+        display_name (str):
+            The name of this instance configuration as it
+            appears in UIs.
+        config_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.Type):
+            Output only. Whether this instance
+            configuration is a Google-managed or
+            user-managed configuration.
+        replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]):
+            The geographic placement of nodes in this instance
+            configuration and their replication properties.
+
+            To create user-managed configurations, input ``replicas``
+            must include all replicas in ``replicas`` of the
+            ``base_config`` and include one or more replicas in the
+            ``optional_replicas`` of the ``base_config``.
+        optional_replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]):
+            Output only. The available optional replicas
+            to choose from for user-managed configurations.
+            Populated for Google-managed configurations.
+        base_config (str):
+            Base configuration name, e.g.
+            projects/<project>/instanceConfigs/nam3, based on which
+            this configuration is created. Only set for user-managed
+            configurations. ``base_config`` must refer to a
+            configuration of type ``GOOGLE_MANAGED`` in the same project
+            as this configuration.
+        labels (MutableMapping[str, str]):
+            Cloud Labels are a flexible and lightweight mechanism for
+            organizing cloud resources into groups that reflect a
+            customer's organizational needs and deployment strategies.
+            Cloud Labels can be used to filter collections of resources.
+            They can be used to control how resource metrics are
+            aggregated. And they can be used as arguments to policy
+            management rules (e.g. route, firewall, load balancing,
+            etc.).
+
+            - Label keys must be between 1 and 63 characters long and
+              must conform to the following regular expression:
+              ``[a-z][a-z0-9_-]{0,62}``.
+            - Label values must be between 0 and 63 characters long and
+              must conform to the regular expression
+              ``[a-z0-9_-]{0,63}``.
+            - No more than 64 labels can be associated with a given
+              resource.
+
+            See https://goo.gl/xmQnxf for more information on and
+            examples of labels.
+
+            If you plan to use labels in your own code, please note that
+            additional characters may be allowed in the future.
+            Therefore, you are advised to use an internal label
+            representation, such as JSON, which doesn't rely upon
+            specific characters being disallowed. For example,
+            representing labels as the string: name + "*" + value would
+            prove problematic if we were to allow "*" in a future
+            release.
+        etag (str):
+            etag is used for optimistic concurrency
+            control as a way to help prevent simultaneous
+            updates of an instance configuration from
+            overwriting each other. It is strongly suggested
+            that systems make use of the etag in the
+            read-modify-write cycle to perform instance
+            configuration updates in order to avoid race
+            conditions: An etag is returned in the response
+            which contains instance configurations, and
+            systems are expected to put that etag in the
+            request to update instance configuration to
+            ensure that their change is applied to the same
+            version of the instance configuration.
If no + etag is provided in the call to update the + instance configuration, then the existing + instance configuration is overwritten blindly. + leader_options (MutableSequence[str]): + Allowed values of the "default_leader" schema option for + databases in instances that use this instance configuration. + reconciling (bool): + Output only. If true, the instance + configuration is being created or updated. If + false, there are no ongoing operations for the + instance configuration. + state (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.State): + Output only. The current instance configuration state. + Applicable only for ``USER_MANAGED`` configurations. + free_instance_availability (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.FreeInstanceAvailability): + Output only. Describes whether free instances + are available to be created in this instance + configuration. + quorum_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.QuorumType): + Output only. The ``QuorumType`` of the instance + configuration. + storage_limit_per_processing_unit (int): + Output only. The storage limit in bytes per + processing unit. + """ + class Type(proto.Enum): + r"""The type of this configuration. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified. + GOOGLE_MANAGED (1): + Google-managed configuration. + USER_MANAGED (2): + User-managed configuration. + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_MANAGED = 1 + USER_MANAGED = 2 + + class State(proto.Enum): + r"""Indicates the current state of the instance configuration. + + Values: + STATE_UNSPECIFIED (0): + Not specified. + CREATING (1): + The instance configuration is still being + created. + READY (2): + The instance configuration is fully created + and ready to be used to create instances. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + class FreeInstanceAvailability(proto.Enum): + r"""Describes the availability for free instances to be created + in an instance configuration. + + Values: + FREE_INSTANCE_AVAILABILITY_UNSPECIFIED (0): + Not specified. + AVAILABLE (1): + Indicates that free instances are available + to be created in this instance configuration. + UNSUPPORTED (2): + Indicates that free instances are not + supported in this instance configuration. + DISABLED (3): + Indicates that free instances are currently + not available to be created in this instance + configuration. + QUOTA_EXCEEDED (4): + Indicates that additional free instances + cannot be created in this instance configuration + because the project has reached its limit of + free instances. + """ + FREE_INSTANCE_AVAILABILITY_UNSPECIFIED = 0 + AVAILABLE = 1 + UNSUPPORTED = 2 + DISABLED = 3 + QUOTA_EXCEEDED = 4 + + class QuorumType(proto.Enum): + r"""Indicates the quorum type of this instance configuration. + + Values: + QUORUM_TYPE_UNSPECIFIED (0): + Quorum type not specified. + REGION (1): + An instance configuration tagged with ``REGION`` quorum type + forms a write quorum in a single region. + DUAL_REGION (2): + An instance configuration tagged with the ``DUAL_REGION`` + quorum type forms a write quorum with exactly two read-write + regions in a multi-region configuration. + + This instance configuration requires failover in the event + of regional failures. + MULTI_REGION (3): + An instance configuration tagged with the ``MULTI_REGION`` + quorum type forms a write quorum from replicas that are + spread across more than one region in a multi-region + configuration. 
+ """ + QUORUM_TYPE_UNSPECIFIED = 0 + REGION = 1 + DUAL_REGION = 2 + MULTI_REGION = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + config_type: Type = proto.Field( + proto.ENUM, + number=5, + enum=Type, + ) + replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ReplicaInfo', + ) + optional_replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='ReplicaInfo', + ) + base_config: str = proto.Field( + proto.STRING, + number=7, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + etag: str = proto.Field( + proto.STRING, + number=9, + ) + leader_options: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + reconciling: bool = proto.Field( + proto.BOOL, + number=10, + ) + state: State = proto.Field( + proto.ENUM, + number=11, + enum=State, + ) + free_instance_availability: FreeInstanceAvailability = proto.Field( + proto.ENUM, + number=12, + enum=FreeInstanceAvailability, + ) + quorum_type: QuorumType = proto.Field( + proto.ENUM, + number=18, + enum=QuorumType, + ) + storage_limit_per_processing_unit: int = proto.Field( + proto.INT64, + number=19, + ) + + +class ReplicaComputeCapacity(proto.Message): + r"""ReplicaComputeCapacity describes the amount of server + resources that are allocated to each replica identified by the + replica selection. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection): + Required. Identifies replicas by specified + properties. All replicas in the selection have + the same amount of compute capacity. + node_count (int): + The number of nodes allocated to each replica. + + This may be zero in API responses for instances that are not + yet in state ``READY``. + + This field is a member of `oneof`_ ``compute_capacity``. + processing_units (int): + The number of processing units allocated to each replica. + + This may be zero in API responses for instances that are not + yet in state ``READY``. + + This field is a member of `oneof`_ ``compute_capacity``. + """ + + replica_selection: common.ReplicaSelection = proto.Field( + proto.MESSAGE, + number=1, + message=common.ReplicaSelection, + ) + node_count: int = proto.Field( + proto.INT32, + number=2, + oneof='compute_capacity', + ) + processing_units: int = proto.Field( + proto.INT32, + number=3, + oneof='compute_capacity', + ) + + +class AutoscalingConfig(proto.Message): + r"""Autoscaling configuration for an instance. + + Attributes: + autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): + Required. Autoscaling limits for an instance. + autoscaling_targets (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingTargets): + Required. The autoscaling targets for an + instance. + asymmetric_autoscaling_options (MutableSequence[google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption]): + Optional. Optional asymmetric autoscaling + options. 
+            Replicas matching the replica selection
+            criteria will be autoscaled independently from
+            other replicas. The autoscaler will scale the
+            replicas based on the utilization of replicas
+            identified by the replica selection. Replica
+            selections should not overlap with each other.
+
+            Other replicas (those that do not match any replica
+            selection) will be autoscaled together and will
+            have the same compute capacity allocated to
+            them.
+    """
+
+    class AutoscalingLimits(proto.Message):
+        r"""The autoscaling limits for the instance. Users can define the
+        minimum and maximum compute capacity allocated to the instance, and
+        the autoscaler will only scale within that range. Users can either
+        use nodes or processing units to specify the limits, but should use
+        the same unit to set both the min_limit and max_limit.
+
+        This message has `oneof`_ fields (mutually exclusive fields).
+        For each oneof, at most one member field can be set at the same time.
+        Setting any member of the oneof automatically clears all other
+        members.
+
+        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+        Attributes:
+            min_nodes (int):
+                Minimum number of nodes allocated to the
+                instance. If set, this number should be greater
+                than or equal to 1.
+
+                This field is a member of `oneof`_ ``min_limit``.
+            min_processing_units (int):
+                Minimum number of processing units allocated
+                to the instance. If set, this number should be
+                multiples of 1000.
+
+                This field is a member of `oneof`_ ``min_limit``.
+            max_nodes (int):
+                Maximum number of nodes allocated to the instance. If set,
+                this number should be greater than or equal to min_nodes.
+
+                This field is a member of `oneof`_ ``max_limit``.
+            max_processing_units (int):
+                Maximum number of processing units allocated to the
+                instance. If set, this number should be multiples of 1000
+                and be greater than or equal to min_processing_units.
+
+                This field is a member of `oneof`_ ``max_limit``.
+        """
+
+        min_nodes: int = proto.Field(
+            proto.INT32,
+            number=1,
+            oneof='min_limit',
+        )
+        min_processing_units: int = proto.Field(
+            proto.INT32,
+            number=2,
+            oneof='min_limit',
+        )
+        max_nodes: int = proto.Field(
+            proto.INT32,
+            number=3,
+            oneof='max_limit',
+        )
+        max_processing_units: int = proto.Field(
+            proto.INT32,
+            number=4,
+            oneof='max_limit',
+        )
+
+    class AutoscalingTargets(proto.Message):
+        r"""The autoscaling targets for an instance.
+
+        Attributes:
+            high_priority_cpu_utilization_percent (int):
+                Required. The target high priority cpu utilization
+                percentage that the autoscaler should be trying to achieve
+                for the instance. This number is on a scale from 0 (no
+                utilization) to 100 (full utilization). The valid range is
+                [10, 90] inclusive.
+            storage_utilization_percent (int):
+                Required. The target storage utilization percentage that the
+                autoscaler should be trying to achieve for the instance.
+                This number is on a scale from 0 (no utilization) to 100
+                (full utilization). The valid range is [10, 99] inclusive.
+        """
+
+        high_priority_cpu_utilization_percent: int = proto.Field(
+            proto.INT32,
+            number=1,
+        )
+        storage_utilization_percent: int = proto.Field(
+            proto.INT32,
+            number=2,
+        )
+
+    class AsymmetricAutoscalingOption(proto.Message):
+        r"""AsymmetricAutoscalingOption specifies the scaling of replicas
+        identified by the given selection.
+
+        Attributes:
+            replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection):
+                Required.
Selects the replicas to which this + AsymmetricAutoscalingOption applies. Only + read-only replicas are supported. + overrides (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides): + Optional. Overrides applied to the top-level + autoscaling configuration for the selected + replicas. + """ + + class AutoscalingConfigOverrides(proto.Message): + r"""Overrides the top-level autoscaling configuration for the replicas + identified by ``replica_selection``. All fields in this message are + optional. Any unspecified fields will use the corresponding values + from the top-level autoscaling configuration. + + Attributes: + autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): + Optional. If specified, overrides the min/max + limit in the top-level autoscaling configuration + for the selected replicas. + autoscaling_target_high_priority_cpu_utilization_percent (int): + Optional. If specified, overrides the autoscaling target + high_priority_cpu_utilization_percent in the top-level + autoscaling configuration for the selected replicas. + """ + + autoscaling_limits: 'AutoscalingConfig.AutoscalingLimits' = proto.Field( + proto.MESSAGE, + number=1, + message='AutoscalingConfig.AutoscalingLimits', + ) + autoscaling_target_high_priority_cpu_utilization_percent: int = proto.Field( + proto.INT32, + number=2, + ) + + replica_selection: common.ReplicaSelection = proto.Field( + proto.MESSAGE, + number=1, + message=common.ReplicaSelection, + ) + overrides: 'AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides' = proto.Field( + proto.MESSAGE, + number=2, + message='AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides', + ) + + autoscaling_limits: AutoscalingLimits = proto.Field( + proto.MESSAGE, + number=1, + message=AutoscalingLimits, + ) + autoscaling_targets: AutoscalingTargets = proto.Field( + proto.MESSAGE, + number=2, + message=AutoscalingTargets, + ) + asymmetric_autoscaling_options: MutableSequence[AsymmetricAutoscalingOption] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=AsymmetricAutoscalingOption, + ) + + +class Instance(proto.Message): + r"""An isolated set of Cloud Spanner resources on which databases + can be hosted. + + Attributes: + name (str): + Required. A unique identifier for the instance, which cannot + be changed after the instance is created. Values are of the + form + ``projects//instances/[a-z][-a-z0-9]*[a-z0-9]``. + The final segment of the name must be between 2 and 64 + characters in length. + config (str): + Required. The name of the instance's configuration. Values + are of the form + ``projects//instanceConfigs/``. See + also + [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] + and + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + display_name (str): + Required. The descriptive name for this + instance as it appears in UIs. Must be unique + per project and between 4 and 30 characters in + length. + node_count (int): + The number of nodes allocated to this instance. At most, one + of either ``node_count`` or ``processing_units`` should be + present in the message. + + Users can set the ``node_count`` field to specify the target + number of nodes allocated to the instance. + + If autoscaling is enabled, ``node_count`` is treated as an + ``OUTPUT_ONLY`` field and reflects the current number of + nodes allocated to the instance. 
+ + This might be zero in API responses for instances that are + not yet in the ``READY`` state. + + For more information, see `Compute capacity, nodes, and + processing + units `__. + processing_units (int): + The number of processing units allocated to this instance. + At most, one of either ``processing_units`` or + ``node_count`` should be present in the message. + + Users can set the ``processing_units`` field to specify the + target number of processing units allocated to the instance. + + If autoscaling is enabled, ``processing_units`` is treated + as an ``OUTPUT_ONLY`` field and reflects the current number + of processing units allocated to the instance. + + This might be zero in API responses for instances that are + not yet in the ``READY`` state. + + For more information, see `Compute capacity, nodes and + processing + units `__. + replica_compute_capacity (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaComputeCapacity]): + Output only. Lists the compute capacity per + ReplicaSelection. A replica selection identifies + a set of replicas with common properties. + Replicas identified by a ReplicaSelection are + scaled with the same compute capacity. + autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig): + Optional. The autoscaling configuration. Autoscaling is + enabled if this field is set. When autoscaling is enabled, + node_count and processing_units are treated as OUTPUT_ONLY + fields and reflect the current compute capacity allocated to + the instance. + state (google.cloud.spanner_admin_instance_v1.types.Instance.State): + Output only. The current instance state. For + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance], + the state must be either omitted or set to ``CREATING``. For + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance], + the state must be either omitted or set to ``READY``. + labels (MutableMapping[str, str]): + Cloud Labels are a flexible and lightweight mechanism for + organizing cloud resources into groups that reflect a + customer's organizational needs and deployment strategies. + Cloud Labels can be used to filter collections of resources. + They can be used to control how resource metrics are + aggregated. And they can be used as arguments to policy + management rules (e.g. route, firewall, load balancing, + etc.). + + - Label keys must be between 1 and 63 characters long and + must conform to the following regular expression: + ``[a-z][a-z0-9_-]{0,62}``. + - Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``[a-z0-9_-]{0,63}``. + - No more than 64 labels can be associated with a given + resource. + + See https://goo.gl/xmQnxf for more information on and + examples of labels. + + If you plan to use labels in your own code, please note that + additional characters may be allowed in the future. And so + you are advised to use an internal label representation, + such as JSON, which doesn't rely upon specific characters + being disallowed. For example, representing labels as the + string: name + "*" + value would prove problematic if we + were to allow "*" in a future release. + instance_type (google.cloud.spanner_admin_instance_v1.types.Instance.InstanceType): + The ``InstanceType`` of the current instance. + endpoint_uris (MutableSequence[str]): + Deprecated. This field is not populated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time at which the instance + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the instance + was most recently updated. + free_instance_metadata (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata): + Free instance metadata. Only populated for + free instances. + edition (google.cloud.spanner_admin_instance_v1.types.Instance.Edition): + Optional. The ``Edition`` of the current instance. + default_backup_schedule_type (google.cloud.spanner_admin_instance_v1.types.Instance.DefaultBackupScheduleType): + Optional. Controls the default backup schedule behavior for + new databases within the instance. By default, a backup + schedule is created automatically when a new database is + created in a new instance. + + Note that the ``AUTOMATIC`` value isn't permitted for free + instances, as backups and backup schedules aren't supported + for free instances. + + In the ``GetInstance`` or ``ListInstances`` response, if the + value of ``default_backup_schedule_type`` isn't set, or set + to ``NONE``, Spanner doesn't create a default backup + schedule for new databases in the instance. + """ + class State(proto.Enum): + r"""Indicates the current state of the instance. + + Values: + STATE_UNSPECIFIED (0): + Not specified. + CREATING (1): + The instance is still being created. + Resources may not be available yet, and + operations such as database creation may not + work. + READY (2): + The instance is fully created and ready to do + work such as creating databases. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + class InstanceType(proto.Enum): + r"""The type of this instance. The type can be used to distinguish + product variants, that can affect aspects like: usage restrictions, + quotas and billing. Currently this is used to distinguish + FREE_INSTANCE vs PROVISIONED instances. + + Values: + INSTANCE_TYPE_UNSPECIFIED (0): + Not specified. + PROVISIONED (1): + Provisioned instances have dedicated + resources, standard usage limits and support. + FREE_INSTANCE (2): + Free instances provide no guarantee for dedicated resources, + [node_count, processing_units] should be 0. They come with + stricter usage limits and limited support. + """ + INSTANCE_TYPE_UNSPECIFIED = 0 + PROVISIONED = 1 + FREE_INSTANCE = 2 + + class Edition(proto.Enum): + r"""The edition selected for this instance. Different editions + provide different capabilities at different price points. + + Values: + EDITION_UNSPECIFIED (0): + Edition not specified. + STANDARD (1): + Standard edition. + ENTERPRISE (2): + Enterprise edition. + ENTERPRISE_PLUS (3): + Enterprise Plus edition. + """ + EDITION_UNSPECIFIED = 0 + STANDARD = 1 + ENTERPRISE = 2 + ENTERPRISE_PLUS = 3 + + class DefaultBackupScheduleType(proto.Enum): + r"""Indicates the `default backup + schedule `__ + behavior for new databases within the instance. + + Values: + DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED (0): + Not specified. + NONE (1): + A default backup schedule isn't created + automatically when a new database is created in + the instance. + AUTOMATIC (2): + A default backup schedule is created + automatically when a new database is created in + the instance. The default backup schedule + creates a full backup every 24 hours. These full + backups are retained for 7 days. You can edit or + delete the default backup schedule once it's + created. 
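+
+        Example (an illustrative sketch; ``instance`` is assumed to be an
+        existing ``Instance`` message)::
+
+            instance.default_backup_schedule_type = (
+                Instance.DefaultBackupScheduleType.AUTOMATIC
+            )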
+ """ + DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED = 0 + NONE = 1 + AUTOMATIC = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + node_count: int = proto.Field( + proto.INT32, + number=5, + ) + processing_units: int = proto.Field( + proto.INT32, + number=9, + ) + replica_compute_capacity: MutableSequence['ReplicaComputeCapacity'] = proto.RepeatedField( + proto.MESSAGE, + number=19, + message='ReplicaComputeCapacity', + ) + autoscaling_config: 'AutoscalingConfig' = proto.Field( + proto.MESSAGE, + number=17, + message='AutoscalingConfig', + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + instance_type: InstanceType = proto.Field( + proto.ENUM, + number=10, + enum=InstanceType, + ) + endpoint_uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + free_instance_metadata: 'FreeInstanceMetadata' = proto.Field( + proto.MESSAGE, + number=13, + message='FreeInstanceMetadata', + ) + edition: Edition = proto.Field( + proto.ENUM, + number=20, + enum=Edition, + ) + default_backup_schedule_type: DefaultBackupScheduleType = proto.Field( + proto.ENUM, + number=23, + enum=DefaultBackupScheduleType, + ) + + +class ListInstanceConfigsRequest(proto.Message): + r"""The request for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + + Attributes: + parent (str): + Required. The name of the project for which a list of + supported instance configurations is requested. Values are + of the form ``projects/``. + page_size (int): + Number of instance configurations to be + returned in the response. If 0 or less, defaults + to the server's maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigsResponse.next_page_token] + from a previous + [ListInstanceConfigsResponse][google.spanner.admin.instance.v1.ListInstanceConfigsResponse]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInstanceConfigsResponse(proto.Message): + r"""The response for + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + + Attributes: + instance_configs (MutableSequence[google.cloud.spanner_admin_instance_v1.types.InstanceConfig]): + The list of requested instance + configurations. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs] + call to fetch more of the matching instance configurations. 
+ """ + + @property + def raw_page(self): + return self + + instance_configs: MutableSequence['InstanceConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InstanceConfig', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInstanceConfigRequest(proto.Message): + r"""The request for + [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. + + Attributes: + name (str): + Required. The name of the requested instance configuration. + Values are of the form + ``projects//instanceConfigs/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceConfigRequest(proto.Message): + r"""The request for + [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. + + Attributes: + parent (str): + Required. The name of the project in which to create the + instance configuration. Values are of the form + ``projects/``. + instance_config_id (str): + Required. The ID of the instance configuration to create. + Valid identifiers are of the form + ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 + characters in length. The ``custom-`` prefix is required to + avoid name conflicts with Google-managed configurations. + instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): + Required. The ``InstanceConfig`` proto of the configuration + to create. ``instance_config.name`` must be + ``/instanceConfigs/``. + ``instance_config.base_config`` must be a Google-managed + configuration name, e.g. /instanceConfigs/us-east1, + /instanceConfigs/nam3. + validate_only (bool): + An option to validate, but not actually + execute, a request, and provide the same + response. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_config_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance_config: 'InstanceConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='InstanceConfig', + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateInstanceConfigRequest(proto.Message): + r"""The request for + [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. + + Attributes: + instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): + Required. The user instance configuration to update, which + must always include the instance configuration name. + Otherwise, only fields mentioned in + [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask] + need be included. To prevent conflicts of concurrent + updates, + [etag][google.spanner.admin.instance.v1.InstanceConfig.reconciling] + can be used. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] + should be updated. The field mask must always be specified; + this prevents any future fields in + [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] + from being erased accidentally by clients that do not know + about them. Only display_name and labels can be updated. + validate_only (bool): + An option to validate, but not actually + execute, a request, and provide the same + response. 
+ """ + + instance_config: 'InstanceConfig' = proto.Field( + proto.MESSAGE, + number=1, + message='InstanceConfig', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteInstanceConfigRequest(proto.Message): + r"""The request for + [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. + + Attributes: + name (str): + Required. The name of the instance configuration to be + deleted. Values are of the form + ``projects//instanceConfigs/`` + etag (str): + Used for optimistic concurrency control as a + way to help prevent simultaneous deletes of an + instance configuration from overwriting each + other. If not empty, the API + only deletes the instance configuration when the + etag provided matches the current status of the + requested instance configuration. Otherwise, + deletes the instance configuration without + checking the current status of the requested + instance configuration. + validate_only (bool): + An option to validate, but not actually + execute, a request, and provide the same + response. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListInstanceConfigOperationsRequest(proto.Message): + r"""The request for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + + Attributes: + parent (str): + Required. The project of the instance configuration + operations. Values are of the form ``projects/``. + filter (str): + An expression that filters the list of returned operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the Operation are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. For example, + the type string for + [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata] + is + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first, if filtering + on metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic. However, you can specify AND, OR, + and NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. 
+ - ``(metadata.@type=`` + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata) AND`` + ``(metadata.instance_config.name:custom-config) AND`` + ``(metadata.progress.start_time < \"2021-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Return operations where: + + - The operation's metadata type is + [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. + - The instance configuration name contains + "custom-config". + - The operation started before 2021-03-28T14:50:00Z. + - The operation resulted in an error. + page_size (int): + Number of operations to be returned in the + response. If 0 or less, defaults to the server's + maximum allowed page size. + page_token (str): + If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse.next_page_token] + from a previous + [ListInstanceConfigOperationsResponse][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse] + to the same ``parent`` and with the same ``filter``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListInstanceConfigOperationsResponse(proto.Message): + r"""The response for + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. + + Attributes: + operations (MutableSequence[google.longrunning.operations_pb2.Operation]): + The list of matching instance configuration long-running + operations. Each operation's name will be prefixed by the + name of the instance configuration. The operation's metadata + field type ``metadata.type_url`` describes the type of the + metadata. + next_page_token (str): + ``next_page_token`` can be sent in a subsequent + [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations] + call to fetch more of the matching metadata. + """ + + @property + def raw_page(self): + return self + + operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=operations_pb2.Operation, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInstanceRequest(proto.Message): + r"""The request for + [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. + + Attributes: + name (str): + Required. The name of the requested instance. Values are of + the form ``projects//instances/``. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + If field_mask is present, specifies the subset of + [Instance][google.spanner.admin.instance.v1.Instance] fields + that should be returned. If absent, all + [Instance][google.spanner.admin.instance.v1.Instance] fields + are returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + field_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class CreateInstanceRequest(proto.Message): + r"""The request for + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + + Attributes: + parent (str): + Required. The name of the project in which to create the + instance. Values are of the form ``projects/``. + instance_id (str): + Required. The ID of the instance to create. 
+            Valid identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``
+            and must be between 2 and 64 characters in length.
+        instance (google.cloud.spanner_admin_instance_v1.types.Instance):
+            Required. The instance to create. The name may be omitted,
+            but if specified must be
+            ``<parent>/instances/<instance_id>``.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    instance_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    instance: 'Instance' = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='Instance',
+    )
+
+
+class ListInstancesRequest(proto.Message):
+    r"""The request for
+    [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+
+    Attributes:
+        parent (str):
+            Required. The name of the project for which a list of
+            instances is requested. Values are of the form
+            ``projects/<project>``.
+        page_size (int):
+            Number of instances to be returned in the
+            response. If 0 or less, defaults to the server's
+            maximum allowed page size.
+        page_token (str):
+            If non-empty, ``page_token`` should contain a
+            [next_page_token][google.spanner.admin.instance.v1.ListInstancesResponse.next_page_token]
+            from a previous
+            [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse].
+        filter (str):
+            An expression for filtering the results of the request.
+            Filter rules are case insensitive. The fields eligible for
+            filtering are:
+
+            -  ``name``
+            -  ``display_name``
+            -  ``labels.key`` where key is the name of a label
+
+            Some examples of using filters are:
+
+            -  ``name:*`` --> The instance has a name.
+            -  ``name:Howl`` --> The instance's name contains the string
+               "howl".
+            -  ``name:HOWL`` --> Equivalent to above.
+            -  ``NAME:howl`` --> Equivalent to above.
+            -  ``labels.env:*`` --> The instance has the label "env".
+            -  ``labels.env:dev`` --> The instance has the label "env"
+               and the value of the label contains the string "dev".
+            -  ``name:howl labels.env:dev`` --> The instance's name
+               contains "howl" and it has the label "env" with its value
+               containing "dev".
+        instance_deadline (google.protobuf.timestamp_pb2.Timestamp):
+            Deadline used while retrieving metadata for instances.
+            Instances whose metadata cannot be retrieved within this
+            deadline will be added to
+            [unreachable][google.spanner.admin.instance.v1.ListInstancesResponse.unreachable]
+            in
+            [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse].
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    instance_deadline: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ListInstancesResponse(proto.Message):
+    r"""The response for
+    [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
+
+    Attributes:
+        instances (MutableSequence[google.cloud.spanner_admin_instance_v1.types.Instance]):
+            The list of requested instances.
+        next_page_token (str):
+            ``next_page_token`` can be sent in a subsequent
+            [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]
+            call to fetch more of the matching instances.
+        unreachable (MutableSequence[str]):
+            The list of unreachable instances. It includes the names of
+            instances whose metadata could not be retrieved within
+            [instance_deadline][google.spanner.admin.instance.v1.ListInstancesRequest.instance_deadline].
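+
+    A minimal paging sketch (illustrative only; the project ID below is
+    a placeholder). The generated client returns a pager that consumes
+    ``next_page_token`` automatically, so explicit token handling is
+    rarely needed::
+
+        from google.cloud import spanner_admin_instance_v1
+
+        client = spanner_admin_instance_v1.InstanceAdminClient()
+        request = spanner_admin_instance_v1.ListInstancesRequest(
+            parent="projects/my-project",  # hypothetical project
+            filter="labels.env:dev",
+        )
+        for instance in client.list_instances(request=request):
+            print(instance.name)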
+ """ + + @property + def raw_page(self): + return self + + instances: MutableSequence['Instance'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""The request for + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. + + Attributes: + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + Required. The instance to update, which must always include + the instance name. Otherwise, only fields mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] + need be included. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [Instance][google.spanner.admin.instance.v1.Instance] should + be updated. The field mask must always be specified; this + prevents any future fields in + [Instance][google.spanner.admin.instance.v1.Instance] from + being erased accidentally by clients that do not know about + them. + """ + + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=1, + message='Instance', + ) + field_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""The request for + [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. + + Attributes: + name (str): + Required. The name of the instance to be deleted. Values are + of the form ``projects//instances/`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. + + Attributes: + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + The instance being created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the + [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] + request was received. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is guaranteed + to succeed) and cannot be cancelled again. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation failed or + was completed successfully. + expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod): + The expected fulfillment period of this + create operation. + """ + + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=1, + message='Instance', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expected_fulfillment_period: common.FulfillmentPeriod = proto.Field( + proto.ENUM, + number=5, + enum=common.FulfillmentPeriod, + ) + + +class UpdateInstanceMetadata(proto.Message): + r"""Metadata type for the operation returned by + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. 
+ + Attributes: + instance (google.cloud.spanner_admin_instance_v1.types.Instance): + The desired end state of the update. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which + [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance] + request was received. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is guaranteed + to succeed) and cannot be cancelled again. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation failed or + was completed successfully. + expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod): + The expected fulfillment period of this + update operation. + """ + + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=1, + message='Instance', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + expected_fulfillment_period: common.FulfillmentPeriod = proto.Field( + proto.ENUM, + number=5, + enum=common.FulfillmentPeriod, + ) + + +class FreeInstanceMetadata(proto.Message): + r"""Free instance specific metadata that is kept even after an + instance has been upgraded for tracking purposes. + + Attributes: + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Timestamp after which the + instance will either be upgraded or scheduled + for deletion after a grace period. + ExpireBehavior is used to choose between + upgrading or scheduling the free instance for + deletion. This timestamp is set during the + creation of a free instance. + upgrade_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. If present, the timestamp at + which the free instance was upgraded to a + provisioned instance. + expire_behavior (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata.ExpireBehavior): + Specifies the expiration behavior of a free instance. The + default of ExpireBehavior is ``REMOVE_AFTER_GRACE_PERIOD``. + This can be modified during or after creation, and before + expiration. + """ + class ExpireBehavior(proto.Enum): + r"""Allows users to change behavior when a free instance expires. + + Values: + EXPIRE_BEHAVIOR_UNSPECIFIED (0): + Not specified. + FREE_TO_PROVISIONED (1): + When the free instance expires, upgrade the + instance to a provisioned instance. + REMOVE_AFTER_GRACE_PERIOD (2): + When the free instance expires, disable the + instance, and delete it after the grace period + passes if it has not been upgraded. + """ + EXPIRE_BEHAVIOR_UNSPECIFIED = 0 + FREE_TO_PROVISIONED = 1 + REMOVE_AFTER_GRACE_PERIOD = 2 + + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + upgrade_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + expire_behavior: ExpireBehavior = proto.Field( + proto.ENUM, + number=3, + enum=ExpireBehavior, + ) + + +class CreateInstanceConfigMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. 
+
+    Attributes:
+        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+            The target instance configuration end state.
+        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
+            The progress of the
+            [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]
+            operation.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation was
+            cancelled.
+    """
+
+    instance_config: 'InstanceConfig' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='InstanceConfig',
+    )
+    progress: common.OperationProgress = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.OperationProgress,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class UpdateInstanceConfigMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
+
+    Attributes:
+        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
+            The desired instance configuration after
+            updating.
+        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
+            The progress of the
+            [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]
+            operation.
+        cancel_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation was
+            cancelled.
+    """
+
+    instance_config: 'InstanceConfig' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='InstanceConfig',
+    )
+    progress: common.OperationProgress = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=common.OperationProgress,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class InstancePartition(proto.Message):
+    r"""An isolated set of Cloud Spanner resources that databases can
+    define placements on.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Required. A unique identifier for the instance partition.
+            Values are of the form
+            ``projects/<project>/instances/<instance>/instancePartitions/[a-z][-a-z0-9]*[a-z0-9]``.
+            The final segment of the name must be between 2 and 64
+            characters in length. An instance partition's name cannot be
+            changed after the instance partition is created.
+        config (str):
+            Required. The name of the instance partition's
+            configuration. Values are of the form
+            ``projects/<project>/instanceConfigs/<configuration>``. See
+            also
+            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
+            and
+            [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
+        display_name (str):
+            Required. The descriptive name for this
+            instance partition as it appears in UIs. Must be
+            unique per project and between 4 and 30
+            characters in length.
+        node_count (int):
+            The number of nodes allocated to this instance partition.
+
+            Users can set the ``node_count`` field to specify the target
+            number of nodes allocated to the instance partition.
+
+            This may be zero in API responses for instance partitions
+            that are not yet in state ``READY``.
+
+            This field is a member of `oneof`_ ``compute_capacity``.
+        processing_units (int):
+            The number of processing units allocated to this instance
+            partition.
+
+            Users can set the ``processing_units`` field to specify the
+            target number of processing units allocated to the instance
+            partition.
+
+            This might be zero in API responses for instance partitions
+            that are not yet in the ``READY`` state.
+
+            This field is a member of `oneof`_ ``compute_capacity``.
+        autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig):
+            Optional. The autoscaling configuration. Autoscaling is
+            enabled if this field is set. When autoscaling is enabled,
+            fields in compute_capacity are treated as OUTPUT_ONLY fields
+            and reflect the current compute capacity allocated to the
+            instance partition.
+        state (google.cloud.spanner_admin_instance_v1.types.InstancePartition.State):
+            Output only. The current instance partition
+            state.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time at which the instance
+            partition was created.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The time at which the instance
+            partition was most recently updated.
+        referencing_databases (MutableSequence[str]):
+            Output only. The names of the databases that
+            reference this instance partition. Referencing
+            databases should share the parent instance. The
+            existence of any referencing database prevents
+            the instance partition from being deleted.
+        referencing_backups (MutableSequence[str]):
+            Output only. Deprecated: This field is not
+            populated. Output only. The names of the backups
+            that reference this instance partition.
+            Referencing backups should share the parent
+            instance. The existence of any referencing
+            backup prevents the instance partition from
+            being deleted.
+        etag (str):
+            Used for optimistic concurrency control as a
+            way to help prevent simultaneous updates of an
+            instance partition from overwriting each other.
+            It is strongly suggested that systems make use
+            of the etag in the read-modify-write cycle to
+            perform instance partition updates in order to
+            avoid race conditions: An etag is returned in
+            the response which contains instance partitions,
+            and systems are expected to put that etag in the
+            request to update instance partitions to ensure
+            that their change will be applied to the same
+            version of the instance partition. If no etag is
+            provided in the call to update instance
+            partition, then the existing instance partition
+            is overwritten blindly.
+    """
+    class State(proto.Enum):
+        r"""Indicates the current state of the instance partition.
+
+        Values:
+            STATE_UNSPECIFIED (0):
+                Not specified.
+            CREATING (1):
+                The instance partition is still being
+                created. Resources may not be available yet, and
+                operations such as creating placements using
+                this instance partition may not work.
+            READY (2):
+                The instance partition is fully created and
+                ready to do work such as creating placements and
+                being used in databases.
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + node_count: int = proto.Field( + proto.INT32, + number=5, + oneof='compute_capacity', + ) + processing_units: int = proto.Field( + proto.INT32, + number=6, + oneof='compute_capacity', + ) + autoscaling_config: 'AutoscalingConfig' = proto.Field( + proto.MESSAGE, + number=13, + message='AutoscalingConfig', + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + referencing_databases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=10, + ) + referencing_backups: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=11, + ) + etag: str = proto.Field( + proto.STRING, + number=12, + ) + + +class CreateInstancePartitionMetadata(proto.Message): + r"""Metadata type for the operation returned by + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. + + Attributes: + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + The instance partition being created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition] + request was received. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is guaranteed + to succeed) and cannot be cancelled again. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation failed or + was completed successfully. + """ + + instance_partition: 'InstancePartition' = proto.Field( + proto.MESSAGE, + number=1, + message='InstancePartition', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class CreateInstancePartitionRequest(proto.Message): + r"""The request for + [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. + + Attributes: + parent (str): + Required. The name of the instance in which to create the + instance partition. Values are of the form + ``projects//instances/``. + instance_partition_id (str): + Required. The ID of the instance partition to create. Valid + identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and + must be between 2 and 64 characters in length. + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + Required. The instance partition to create. The + instance_partition.name may be omitted, but if specified + must be + ``/instancePartitions/``. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_partition_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance_partition: 'InstancePartition' = proto.Field( + proto.MESSAGE, + number=3, + message='InstancePartition', + ) + + +class DeleteInstancePartitionRequest(proto.Message): + r"""The request for + [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. + + Attributes: + name (str): + Required. The name of the instance partition to be deleted. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` + etag (str): + Optional. If not empty, the API only deletes + the instance partition when the etag provided + matches the current status of the requested + instance partition. Otherwise, deletes the + instance partition without checking the current + status of the requested instance partition. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetInstancePartitionRequest(proto.Message): + r"""The request for + [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. + + Attributes: + name (str): + Required. The name of the requested instance partition. + Values are of the form + ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateInstancePartitionRequest(proto.Message): + r"""The request for + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. + + Attributes: + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + Required. The instance partition to update, which must + always include the instance partition name. Otherwise, only + fields mentioned in + [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] + need be included. + field_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A mask specifying which fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + should be updated. The field mask must always be specified; + this prevents any future fields in + [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] + from being erased accidentally by clients that do not know + about them. + """ + + instance_partition: 'InstancePartition' = proto.Field( + proto.MESSAGE, + number=1, + message='InstancePartition', + ) + field_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class UpdateInstancePartitionMetadata(proto.Message): + r"""Metadata type for the operation returned by + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. + + Attributes: + instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): + The desired end state of the update. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which + [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition] + request was received. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. If set, this operation is in the + process of undoing itself (which is guaranteed + to succeed) and cannot be cancelled again. 
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time at which this operation failed or
+            was completed successfully.
+    """
+
+    instance_partition: 'InstancePartition' = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='InstancePartition',
+    )
+    start_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    cancel_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+    end_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ListInstancePartitionsRequest(proto.Message):
+    r"""The request for
+    [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
+
+    Attributes:
+        parent (str):
+            Required. The instance whose instance partitions should be
+            listed. Values are of the form
+            ``projects/<project>/instances/<instance>``. Use
+            ``{instance} = '-'`` to list instance partitions for all
+            instances in a project, e.g.,
+            ``projects/myproject/instances/-``.
+        page_size (int):
+            Number of instance partitions to be returned
+            in the response. If 0 or less, defaults to the
+            server's maximum allowed page size.
+        page_token (str):
+            If non-empty, ``page_token`` should contain a
+            [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.next_page_token]
+            from a previous
+            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
+        instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. Deadline used while retrieving metadata for
+            instance partitions. Instance partitions whose metadata
+            cannot be retrieved within this deadline will be added to
+            [unreachable][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.unreachable]
+            in
+            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ListInstancePartitionsResponse(proto.Message):
+    r"""The response for
+    [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
+
+    Attributes:
+        instance_partitions (MutableSequence[google.cloud.spanner_admin_instance_v1.types.InstancePartition]):
+            The list of requested instance partitions.
+        next_page_token (str):
+            ``next_page_token`` can be sent in a subsequent
+            [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]
+            call to fetch more of the matching instance partitions.
+        unreachable (MutableSequence[str]):
+            The list of unreachable instances or instance partitions. It
+            includes the names of instances or instance partitions whose
+            metadata could not be retrieved within
+            [instance_partition_deadline][google.spanner.admin.instance.v1.ListInstancePartitionsRequest.instance_partition_deadline].
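+
+    A minimal usage sketch (illustrative only; names are placeholders).
+    The ``'-'`` wildcard lists partitions across every instance in the
+    project, and the pager consumes ``next_page_token`` automatically::
+
+        from google.cloud import spanner_admin_instance_v1
+
+        client = spanner_admin_instance_v1.InstanceAdminClient()
+        request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+            parent="projects/my-project/instances/-",
+        )
+        for partition in client.list_instance_partitions(request=request):
+            print(partition.name)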
+ """ + + @property + def raw_page(self): + return self + + instance_partitions: MutableSequence['InstancePartition'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InstancePartition', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ListInstancePartitionOperationsRequest(proto.Message): + r"""The request for + [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. + + Attributes: + parent (str): + Required. The parent instance of the instance partition + operations. Values are of the form + ``projects//instances/``. + filter (str): + Optional. An expression that filters the list of returned + operations. + + A filter expression consists of a field name, a comparison + operator, and a value for filtering. The value must be a + string, a number, or a boolean. The comparison operator must + be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or + ``:``. Colon ``:`` is the contains operator. Filter rules + are not case sensitive. + + The following fields in the Operation are eligible for + filtering: + + - ``name`` - The name of the long-running operation + - ``done`` - False if the operation is in progress, else + true. + - ``metadata.@type`` - the type of metadata. For example, + the type string for + [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata] + is + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata``. + - ``metadata.`` - any field in metadata.value. + ``metadata.@type`` must be specified first, if filtering + on metadata fields. + - ``error`` - Error associated with the long-running + operation. + - ``response.@type`` - the type of response. + - ``response.`` - any field in response.value. + + You can combine multiple expressions by enclosing each + expression in parentheses. By default, expressions are + combined with AND logic. However, you can specify AND, OR, + and NOT logic explicitly. + + Here are a few examples: + + - ``done:true`` - The operation is complete. + - ``(metadata.@type=`` + ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata) AND`` + ``(metadata.instance_partition.name:custom-instance-partition) AND`` + ``(metadata.start_time < \"2021-03-28T14:50:00Z\") AND`` + ``(error:*)`` - Return operations where: + + - The operation's metadata type is + [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. + - The instance partition name contains + "custom-instance-partition". + - The operation started before 2021-03-28T14:50:00Z. + - The operation resulted in an error. + page_size (int): + Optional. Number of operations to be returned + in the response. If 0 or less, defaults to the + server's maximum allowed page size. + page_token (str): + Optional. If non-empty, ``page_token`` should contain a + [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.next_page_token] + from a previous + [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse] + to the same ``parent`` and with the same ``filter``. + instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp): + Optional. Deadline used while retrieving metadata for + instance partition operations. 
+            Instance partitions whose operation metadata cannot be
+            retrieved within this deadline will be added to
+            [unreachable_instance_partitions][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.unreachable_instance_partitions]
+            in
+            [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse].
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class ListInstancePartitionOperationsResponse(proto.Message):
+    r"""The response for
+    [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
+
+    Attributes:
+        operations (MutableSequence[google.longrunning.operations_pb2.Operation]):
+            The list of matching instance partition long-running
+            operations. Each operation's name will be prefixed by the
+            instance partition's name. The operation's metadata field
+            type ``metadata.type_url`` describes the type of the
+            metadata.
+        next_page_token (str):
+            ``next_page_token`` can be sent in a subsequent
+            [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]
+            call to fetch more of the matching metadata.
+        unreachable_instance_partitions (MutableSequence[str]):
+            The list of unreachable instance partitions. It includes the
+            names of instance partitions whose operation metadata could
+            not be retrieved within
+            [instance_partition_deadline][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.instance_partition_deadline].
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=operations_pb2.Operation,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable_instance_partitions: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class MoveInstanceRequest(proto.Message):
+    r"""The request for
+    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+    Attributes:
+        name (str):
+            Required. The instance to move. Values are of the form
+            ``projects/<project>/instances/<instance>``.
+        target_config (str):
+            Required. The target instance configuration to which the
+            instance will be moved. Values are of the form
+            ``projects/<project>/instanceConfigs/<config>``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    target_config: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class MoveInstanceResponse(proto.Message):
+    r"""The response for
+    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+    """
+
+
+class MoveInstanceMetadata(proto.Message):
+    r"""Metadata type for the operation returned by
+    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
+
+    Attributes:
+        target_config (str):
+            The target instance configuration to which the instance is
+            being moved. Values are of the form
+            ``projects/<project>/instanceConfigs/<config>``.
+        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
+            The progress of the
+            [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]
+            operation.
+ [progress_percent][google.spanner.admin.instance.v1.OperationProgress.progress_percent] + is reset when cancellation is requested. + cancel_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which this operation was + cancelled. + """ + + target_config: str = proto.Field( + proto.STRING, + number=1, + ) + progress: common.OperationProgress = proto.Field( + proto.MESSAGE, + number=2, + message=common.OperationProgress, + ) + cancel_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini b/owl-bot-staging/spanner_admin_instance/v1/mypy.ini new file mode 100644 index 0000000000..574c5aed39 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py b/owl-bot-staging/spanner_admin_instance/v1/noxfile.py new file mode 100644 index 0000000000..5918b3e5f7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/noxfile.py @@ -0,0 +1,601 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
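+#
+# NOTE: This generated noxfile defines the standard GAPIC verification
+# sessions (unit, system, cover, lint, lint_setup_py, blacken, docs; see
+# ``nox.options.sessions`` below) across all supported CPython versions.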
+# +import os +import pathlib +import re +import shutil + +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] + +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = "google-cloud-spanner-admin-instance" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. 
+ """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=PREVIEW_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. + # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
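+    # Each entry below is a PEP 508 direct reference (`name @ git+URL`),
+    # so pip installs the dependency straight from its source repository
+    # rather than from PyPI.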
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json new file mode 100644 index 0000000000..f58e9794e2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json @@ -0,0 +1,3450 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.spanner.admin.instance.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-spanner-admin-instance", + "version": "0.0.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_config", + "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "instance_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance_config" + }, + "description": "Sample for CreateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_create_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_config_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_config", + "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "instance_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance_config" + }, + "description": "Sample for CreateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_create_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "instance_partition_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance_partition" + }, + "description": "Sample for CreateInstancePartition", + "file": "spanner_v1_generated_instance_admin_create_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" 
+ }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "instance_partition_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance_partition" + }, + "description": "Sample for CreateInstancePartition", + "file": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for 
CreateInstance", + "file": "spanner_v1_generated_instance_admin_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "spanner_v1_generated_instance_admin_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_config" + }, + 
"description": "Sample for DeleteInstanceConfig", + "file": "spanner_v1_generated_instance_admin_delete_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_config" + }, + "description": "Sample for DeleteInstanceConfig", + "file": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_partition" + }, + "description": "Sample for DeleteInstancePartition", + "file": 
"spanner_v1_generated_instance_admin_delete_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance_partition" + }, + "description": "Sample for DeleteInstancePartition", + "file": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "spanner_v1_generated_instance_admin_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"spanner_v1_generated_InstanceAdmin_DeleteInstance_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "spanner_v1_generated_instance_admin_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstance_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_instance_admin_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", + "shortName": "get_instance_config" + }, + "description": "Sample for GetInstanceConfig", + "file": "spanner_v1_generated_instance_admin_get_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", + "shortName": "get_instance_config" + }, + "description": "Sample for GetInstanceConfig", + "file": "spanner_v1_generated_instance_admin_get_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", + "shortName": "get_instance_partition" + }, + "description": "Sample for GetInstancePartition", + "file": "spanner_v1_generated_instance_admin_get_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { 
+ "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", + "shortName": "get_instance_partition" + }, + "description": "Sample for GetInstancePartition", + "file": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "spanner_v1_generated_instance_admin_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, 
+ "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "spanner_v1_generated_instance_admin_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_config_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager", + "shortName": "list_instance_config_operations" + }, + "description": "Sample for ListInstanceConfigOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_config_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager", + "shortName": "list_instance_config_operations" + }, + "description": "Sample for ListInstanceConfigOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_configs", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager", + "shortName": "list_instance_configs" + }, + "description": "Sample for ListInstanceConfigs", + "file": "spanner_v1_generated_instance_admin_list_instance_configs_async.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_configs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_configs", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstanceConfigs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager", + "shortName": "list_instance_configs" + }, + "description": "Sample for ListInstanceConfigs", + "file": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partition_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitionOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager", + "shortName": 
"list_instance_partition_operations" + }, + "description": "Sample for ListInstancePartitionOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partition_operations", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitionOperations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager", + "shortName": "list_instance_partition_operations" + }, + "description": "Sample for ListInstancePartitionOperations", + "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partitions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager", + "shortName": "list_instance_partitions" + }, + "description": "Sample for ListInstancePartitions", + "file": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partitions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstancePartitions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager", + "shortName": "list_instance_partitions" + }, + "description": "Sample for ListInstancePartitions", + "file": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instances", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "spanner_v1_generated_instance_admin_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instances", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "spanner_v1_generated_instance_admin_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.move_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "spanner_v1_generated_instance_admin_move_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_move_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.move_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "MoveInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "move_instance" + }, + "description": "Sample for MoveInstance", + "file": "spanner_v1_generated_instance_admin_move_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_move_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_instance_admin_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.set_iam_policy", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", 
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.test_iam_permissions", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "permissions", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" + }, + { + "name": "instance_config", + "type": 
"google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance_config" + }, + "description": "Sample for UpdateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_update_instance_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_config", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstanceConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" + }, + { + "name": "instance_config", + "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance_config" + }, + "description": "Sample for UpdateInstanceConfig", + "file": "spanner_v1_generated_instance_admin_update_instance_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_partition", + "method": { + "fullName": 
"google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance_partition" + }, + "description": "Sample for UpdateInstancePartition", + "file": "spanner_v1_generated_instance_admin_update_instance_partition_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_partition_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_partition", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstancePartition" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" + }, + { + "name": "instance_partition", + "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance_partition" + }, + "description": "Sample for UpdateInstancePartition", + "file": "spanner_v1_generated_instance_admin_update_instance_partition_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"spanner_v1_generated_instance_admin_update_instance_partition_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", + "shortName": "InstanceAdminAsyncClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "spanner_v1_generated_instance_admin_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", + "shortName": "InstanceAdminClient" + }, + "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance", + "method": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", + "service": { + "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", + "shortName": "InstanceAdmin" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.spanner_admin_instance_v1.types.Instance" + }, + { + "name": "field_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "spanner_v1_generated_instance_admin_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "spanner_v1_generated_instance_admin_update_instance_sync.py" + } + ] +} diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py new file mode 100644 index 0000000000..74bd640044 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1


+async def sample_create_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = spanner_admin_instance_v1.Instance()
+    instance.name = "name_value"
+    instance.config = "config_value"
+    instance.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.CreateInstanceRequest(
+        parent="parent_value",
+        instance_id="instance_id_value",
+        instance=instance,
+    )
+
+    # Make the request
+    operation = client.create_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_CreateInstance_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py
new file mode 100644
index 0000000000..c3f266e4c4
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1


+async def sample_create_instance_config():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.CreateInstanceConfigRequest(
+        parent="parent_value",
+        instance_config_id="instance_config_id_value",
+    )
+
+    # Make the request
+    operation = client.create_instance_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py
new file mode 100644
index 0000000000..c5b7616534
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_create_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.CreateInstanceConfigRequest( + parent="parent_value", + instance_config_id="instance_config_id_value", + ) + + # Make the request + operation = client.create_instance_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py new file mode 100644 index 0000000000..a22765f53f --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
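+#   (An instance partition's compute capacity is set with either `node_count`
+#   or `processing_units`; the `node_count = 1070` below is an arbitrary
+#   generated value, not a sizing recommendation.)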
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1


+async def sample_create_instance_partition():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance_partition = spanner_admin_instance_v1.InstancePartition()
+    instance_partition.node_count = 1070
+    instance_partition.name = "name_value"
+    instance_partition.config = "config_value"
+    instance_partition.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.CreateInstancePartitionRequest(
+        parent="parent_value",
+        instance_partition_id="instance_partition_id_value",
+        instance_partition=instance_partition,
+    )
+
+    # Make the request
+    operation = client.create_instance_partition(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await (await operation).result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py
new file mode 100644
index 0000000000..5b5f2e0e26
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateInstancePartition
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_create_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance_partition = spanner_admin_instance_v1.InstancePartition() + instance_partition.node_count = 1070 + instance_partition.name = "name_value" + instance_partition.config = "config_value" + instance_partition.display_name = "display_name_value" + + request = spanner_admin_instance_v1.CreateInstancePartitionRequest( + parent="parent_value", + instance_partition_id="instance_partition_id_value", + instance_partition=instance_partition, + ) + + # Make the request + operation = client.create_instance_partition(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py new file mode 100644 index 0000000000..f43c5016b5 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
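+#   (In the sync variant below, `operation.result()` blocks until the
+#   long-running operation completes; an optional timeout in seconds can be
+#   passed, e.g. `operation.result(timeout=300)`.)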
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_create_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance = spanner_admin_instance_v1.Instance() + instance.name = "name_value" + instance.config = "config_value" + instance.display_name = "display_name_value" + + request = spanner_admin_instance_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_CreateInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py new file mode 100644 index 0000000000..262da709aa --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
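+#   (Here `name` is the full instance resource name, of the form
+#   "projects/<project-id>/instances/<instance-id>"; deleting an instance also
+#   deletes all of its databases and their data.)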
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_delete_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py new file mode 100644 index 0000000000..df83d9e424 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_delete_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_config(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py new file mode 100644 index 0000000000..9a9c4d7ca1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_delete_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( + name="name_value", + ) + + # Make the request + client.delete_instance_config(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py new file mode 100644 index 0000000000..78ca44d6c2 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_instance_partition(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py new file mode 100644 index 0000000000..72249ef6c7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_delete_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_instance_partition(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py new file mode 100644 index 0000000000..613ac6c070 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_delete_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + client.delete_instance(request=request) + + +# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py new file mode 100644 index 0000000000..a0b620ae4f --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
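+#   (Note that the request type is `google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`
+#   rather than a Spanner type, and `resource` is a full instance name such as
+#   "projects/<project-id>/instances/<instance-id>".)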
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py new file mode 100644 index 0000000000..cc0d725a03 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py new file mode 100644 index 0000000000..059eb2a078 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_get_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py new file mode 100644 index 0000000000..9adfb51c2e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_get_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_config(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py new file mode 100644 index 0000000000..16e9d3c3c8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_get_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_config(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py new file mode 100644 index 0000000000..8e84abcf6e --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +async def sample_get_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstancePartitionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_partition(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py new file mode 100644 index 0000000000..d617cbb382 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_get_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstancePartitionRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_partition(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py new file mode 100644 index 0000000000..4a246a5bf3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_get_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_GetInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py new file mode 100644 index 0000000000..a0580fef7c --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstanceConfigOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_config_operations():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine; await it to get the pager)
+    page_result = await client.list_instance_config_operations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py
new file mode 100644
index 0000000000..89213b3a2e
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstanceConfigOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_list_instance_config_operations(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_config_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py new file mode 100644 index 0000000000..651b2f88ae --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstanceConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_configs():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine; await it to get the pager)
+    page_result = await client.list_instance_configs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py
new file mode 100644
index 0000000000..a0f120277a
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstanceConfigs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_list_instance_configs(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstanceConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py new file mode 100644 index 0000000000..9dedb973f1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstancePartitionOperations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_partition_operations():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine; await it to get the pager)
+    page_result = await client.list_instance_partition_operations(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py
new file mode 100644
index 0000000000..b2a7549b29
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstancePartitionOperations
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_list_instance_partition_operations(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_partition_operations(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py new file mode 100644 index 0000000000..56adc152fe --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstancePartitions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instance_partitions():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine; await it to get the pager)
+    page_result = await client.list_instance_partitions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py
new file mode 100644
index 0000000000..1e65552fc1
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstancePartitions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_list_instance_partitions(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstancePartitionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instance_partitions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py new file mode 100644 index 0000000000..abe1a1affa --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_list_instances():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.ListInstancesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client method is a coroutine; await it to get the pager)
+    page_result = await client.list_instances(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_ListInstances_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py
new file mode 100644
index 0000000000..f344baff11
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListInstances
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_ListInstances_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_list_instances(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END spanner_v1_generated_InstanceAdmin_ListInstances_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py new file mode 100644 index 0000000000..ce62120492 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MoveInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_MoveInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_move_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.MoveInstanceRequest(
+        name="name_value",
+        target_config="target_config_value",
+    )
+
+    # Make the request (await the call to get the long-running operation)
+    operation = await client.move_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is a coroutine, so it must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_MoveInstance_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py
new file mode 100644
index 0000000000..4621200e0c
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for MoveInstance
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_MoveInstance_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_move_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.MoveInstanceRequest( + name="name_value", + target_config="target_config_value", + ) + + # Make the request + operation = client.move_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_MoveInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py new file mode 100644 index 0000000000..2443f2127d --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py new file mode 100644 index 0000000000..ba6401602f --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py new file mode 100644 index 0000000000..aa0e05dde3 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py new file mode 100644 index 0000000000..80b2a4dd21 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py new file mode 100644 index 0000000000..ecabbf5191 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_update_instance():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance = spanner_admin_instance_v1.Instance()
+    instance.name = "name_value"
+    instance.config = "config_value"
+    instance.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.UpdateInstanceRequest(
+        instance=instance,
+    )
+
+    # Make the request (await the call to get the long-running operation)
+    operation = await client.update_instance(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is a coroutine, so it must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py
new file mode 100644
index 0000000000..f7ea78401c
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_update_instance_config():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
+    )
+
+    # Make the request (await the call to get the long-running operation)
+    operation = await client.update_instance_config(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is a coroutine, so it must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py
new file mode 100644
index 0000000000..1d184f6c58
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstanceConfig
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_update_instance_config(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + request = spanner_admin_instance_v1.UpdateInstanceConfigRequest( + ) + + # Make the request + operation = client.update_instance_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py new file mode 100644 index 0000000000..42d3c484f8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstancePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import spanner_admin_instance_v1
+
+
+async def sample_update_instance_partition():
+    # Create a client
+    client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
+
+    # Initialize request argument(s)
+    instance_partition = spanner_admin_instance_v1.InstancePartition()
+    instance_partition.node_count = 1070
+    instance_partition.name = "name_value"
+    instance_partition.config = "config_value"
+    instance_partition.display_name = "display_name_value"
+
+    request = spanner_admin_instance_v1.UpdateInstancePartitionRequest(
+        instance_partition=instance_partition,
+    )
+
+    # Make the request (await the call to get the long-running operation)
+    operation = await client.update_instance_partition(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # AsyncOperation.result() is a coroutine, so it must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async]
diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py
new file mode 100644
index 0000000000..56cd2760a1
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateInstancePartition
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-spanner-admin-instance
+
+
+# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_update_instance_partition(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance_partition = spanner_admin_instance_v1.InstancePartition() + instance_partition.node_count = 1070 + instance_partition.name = "name_value" + instance_partition.config = "config_value" + instance_partition.display_name = "display_name_value" + + request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( + instance_partition=instance_partition, + ) + + # Make the request + operation = client.update_instance_partition(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py new file mode 100644 index 0000000000..2340e701e1 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-spanner-admin-instance + + +# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import spanner_admin_instance_v1 + + +def sample_update_instance(): + # Create a client + client = spanner_admin_instance_v1.InstanceAdminClient() + + # Initialize request argument(s) + instance = spanner_admin_instance_v1.Instance() + instance.name = "name_value" + instance.config = "config_value" + instance.display_name = "display_name_value" + + request = spanner_admin_instance_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py b/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py new file mode 100644 index 0000000000..8200af5099 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py @@ -0,0 +1,196 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
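+#
+# Example invocation (the directory paths below are illustrative only, not
+# part of the generated script; the flags match the argparse options defined
+# at the bottom of this file):
+#
+#     python3 fixup_spanner_admin_instance_v1_keywords.py \
+#         --input-directory my_project/src --output-directory my_project/fixed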
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class spanner_admin_instanceCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
+        'create_instance': ('parent', 'instance_id', 'instance', ),
+        'create_instance_config': ('parent', 'instance_config_id', 'instance_config', 'validate_only', ),
+        'create_instance_partition': ('parent', 'instance_partition_id', 'instance_partition', ),
+        'delete_instance': ('name', ),
+        'delete_instance_config': ('name', 'etag', 'validate_only', ),
+        'delete_instance_partition': ('name', 'etag', ),
+        'get_iam_policy': ('resource', 'options', ),
+        'get_instance': ('name', 'field_mask', ),
+        'get_instance_config': ('name', ),
+        'get_instance_partition': ('name', ),
+        'list_instance_config_operations': ('parent', 'filter', 'page_size', 'page_token', ),
+        'list_instance_configs': ('parent', 'page_size', 'page_token', ),
+        'list_instance_partition_operations': ('parent', 'filter', 'page_size', 'page_token', 'instance_partition_deadline', ),
+        'list_instance_partitions': ('parent', 'page_size', 'page_token', 'instance_partition_deadline', ),
+        'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'instance_deadline', ),
+        'move_instance': ('name', 'target_config', ),
+        'set_iam_policy': ('resource', 'policy', 'update_mask', ),
+        'test_iam_permissions': ('resource', 'permissions', ),
+        'update_instance': ('instance', 'field_mask', ),
+        'update_instance_config': ('instance_config', 'update_mask', 'validate_only', ),
+        'update_instance_partition': ('instance_partition', 'field_mask', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
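+                # Illustrative example of the rewrite this transformer performs
+                # (the method and value below are hypothetical):
+                #   client.get_instance('projects/p/instances/i')
+                # becomes
+                #   client.get_instance(request={'name': 'projects/p/instances/i'})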
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=spanner_admin_instanceCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the spanner_admin_instance client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates on a best-effort basis when converting positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include
+      A) * or ** expansion in a method call.
+      B) Calls via function or method alias (includes free function calls)
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+      These all constitute false negatives. The tool may also produce false
+      positives when an API method shares a name with another method.
+""")
+    parser.add_argument(
+        '-d',
+        '--input-directory',
+        required=True,
+        dest='input_dir',
+        help='the input directory to walk for python files to fix up',
+    )
+    parser.add_argument(
+        '-o',
+        '--output-directory',
+        required=True,
+        dest='output_dir',
+        help='the directory to output files fixed via un-flattening',
+    )
+    args = parser.parse_args()
+    input_dir = pathlib.Path(args.input_dir)
+    output_dir = pathlib.Path(args.output_dir)
+    if not input_dir.is_dir():
+        print(
+            f"input directory '{input_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if not output_dir.is_dir():
+        print(
+            f"output directory '{output_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if os.listdir(output_dir):
+        print(
+            f"output directory '{output_dir}' is not empty",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    fix_files(input_dir, output_dir)
diff --git a/owl-bot-staging/spanner_admin_instance/v1/setup.py b/owl-bot-staging/spanner_admin_instance/v1/setup.py
new file mode 100644
index 0000000000..8d7bb912c2
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/setup.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import io
+import os
+import re
+
+import setuptools # type: ignore
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+name = 'google-cloud-spanner-admin-instance'
+
+
+description = "Google Cloud Spanner Admin Instance API client library"
+
+version = None
+
+with open(os.path.join(package_root, 'google/cloud/spanner_admin_instance/gapic_version.py')) as fp:
+    version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
+    assert len(version_candidates) == 1
+    version = version_candidates[0]
+
+if version[0] == "0":
+    release_status = "Development Status :: 4 - Beta"
+else:
+    release_status = "Development Status :: 5 - Production/Stable"
+
+dependencies = [
+    "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+    # Exclude incompatible versions of `google-auth`
+    # See https://github.com/googleapis/google-cloud-python/issues/12364
+    "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0",
+    "grpcio >= 1.33.2, < 2.0.0",
+    "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'",
+    "proto-plus >= 1.22.3, <2.0.0",
+    "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'",
+    "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+    "grpc-google-iam-v1 >= 0.14.0, <1.0.0",
+]
+extras = {
+}
+url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-instance"
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
+    readme = readme_file.read()
+
+packages = [
+    package
+    for package in setuptools.find_namespace_packages()
+    if package.startswith("google")
+]
+
+setuptools.setup(
+    name=name,
+    version=version,
+    description=description,
+    long_description=readme,
+    author="Google LLC",
+    author_email="googleapis-packages@google.com",
+    license="Apache 2.0",
+    url=url,
+    classifiers=[
+        release_status,
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
+        "Programming Language :: Python :: 3.13",
+        "Programming Language :: Python :: 3.14",
+        "Operating System :: OS Independent",
+        "Topic :: Internet",
+    ],
+    platforms="Posix; MacOS X; Windows",
+    packages=packages,
+    python_requires=">=3.7",
+    install_requires=dependencies,
+    extras_require=extras,
+    include_package_data=True,
+    zip_safe=False,
+)
diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt
new file mode 100644
index 0000000000..ef1c92ffff
--- /dev/null
+++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt new file mode 100644 index 0000000000..2ae5a677e8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt new file mode 100644 index 0000000000..2ae5a677e8 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000000..5b1ee6c35a --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt @@ -0,0 +1,13 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files +proto-plus==1.22.3 +protobuf==3.20.2 +grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000000..ef1c92ffff --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py new file mode 100644 index 0000000000..191773d557 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py new file mode 100644 index 0000000000..82c9940cf7 --- /dev/null +++ b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py @@ -0,0 +1,17636 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+import re
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable, AsyncIterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from google.api_core import api_core_version
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+try:
+    from google.auth.aio import credentials as ga_credentials_async
+    HAS_GOOGLE_AUTH_AIO = True
+except ImportError:  # pragma: NO COVER
+    HAS_GOOGLE_AUTH_AIO = False
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation
+from google.api_core import operation_async  # type: ignore
+from google.api_core import operations_v1
+from google.api_core import path_template
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminAsyncClient
+from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminClient
+from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers
+from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports
+from google.cloud.spanner_admin_instance_v1.types import common
+from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import options_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.oauth2 import service_account
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.type import expr_pb2  # type: ignore
+import google.auth
+
+
+
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+    # Step by chunk_size so successive chunks do not overlap.
+    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
+        chunk = data[i : i + chunk_size]
+        yield chunk.encode("utf-8")
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+    if HAS_GOOGLE_AUTH_AIO:
+        return ga_credentials_async.AnonymousCredentials()
+    return ga_credentials.AnonymousCredentials()
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InstanceAdminClient._get_default_mtls_endpoint(None) is None + assert InstanceAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert InstanceAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert InstanceAdminClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + InstanceAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert InstanceAdminClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert InstanceAdminClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceAdminClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert InstanceAdminClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert InstanceAdminClient._get_client_cert_source(None, False) is None + assert 
InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert InstanceAdminClient._get_client_cert_source(None, True) is mock_default_cert_source + assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = InstanceAdminClient._DEFAULT_UNIVERSE + default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert InstanceAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT + assert InstanceAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert InstanceAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert InstanceAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert InstanceAdminClient._get_universe_domain(None, None) == InstanceAdminClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + InstanceAdminClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
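+
+# A minimal sketch of how a caller would pin a universe domain (illustrative
+# only; it mirrors the precedence asserted in the test above):
+#
+#   options = client_options.ClientOptions(universe_domain="foo.com")
+#   client = InstanceAdminClient(client_options=options)
+#   assert client.universe_domain == "foo.com"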
+ +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstanceAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstanceAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceAdminClient, "grpc"), + (InstanceAdminAsyncClient, "grpc_asyncio"), + (InstanceAdminClient, "rest"), +]) +def test_instance_admin_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.InstanceAdminGrpcTransport, "grpc"), + (transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.InstanceAdminRestTransport, "rest"), +]) +def test_instance_admin_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (InstanceAdminClient, "grpc"), + (InstanceAdminAsyncClient, "grpc_asyncio"), + (InstanceAdminClient, "rest"), +]) +def test_instance_admin_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'spanner.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://spanner.googleapis.com' + ) + + +def test_instance_admin_client_get_transport_class(): + transport = InstanceAdminClient.get_transport_class() + available_transports = [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminRestTransport, + ] + assert transport in available_transports + + transport = InstanceAdminClient.get_transport_class("grpc") + assert transport == transports.InstanceAdminGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"), +]) +@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) +def test_instance_admin_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "true"),
+    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "false"),
+    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "true"),
+    (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "false"),
+])
+@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient))
+@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instance_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + InstanceAdminClient, InstanceAdminAsyncClient +]) +@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminAsyncClient)) +def test_instance_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + InstanceAdminClient, InstanceAdminAsyncClient +]) +@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient)) +@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient)) +def test_instance_admin_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = InstanceAdminClient._DEFAULT_UNIVERSE + default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"), +]) +def test_instance_admin_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", None), +]) +def test_instance_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_instance_admin_client_client_options_from_dict(): + with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = InstanceAdminClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_instance_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "spanner.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.admin',
+            ),
+            scopes=None,
+            default_host="spanner.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.ListInstanceConfigsRequest,
+    dict,
+])
+def test_list_instance_configs(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstanceConfigsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstanceConfigsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_instance_configs_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner_instance_admin.ListInstanceConfigsRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.list_instance_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_instance_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc + request = {} + client.list_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instance_configs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instance_configs] = mock_rpc + + request = {} + await client.list_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_configs_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigsRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + # Designate an appropriate return value for the call. 
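+        # FakeUnaryUnaryCall wraps the response in an awaitable, mimicking the
+        # call object a real gRPC asyncio stub would return.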
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstanceConfigsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstanceConfigsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_async_from_dict():
+    await test_list_instance_configs_async(request_type=dict)
+
+def test_list_instance_configs_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstanceConfigsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
+        client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstanceConfigsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
+        await client.list_instance_configs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_instance_configs_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_instance_configs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_instance_configs_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_configs(
+            spanner_instance_admin.ListInstanceConfigsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instance_configs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instance_configs(
+            spanner_instance_admin.ListInstanceConfigsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_configs_pager(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Set the response to a series of pages.
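+        # Four pages totalling six items; the trailing RuntimeError fails the
+        # test if the pager ever requests a page beyond the final (empty) token.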
+        call.side_effect = (
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_instance_configs(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.InstanceConfig)
+                   for i in results)
+
+
+def test_list_instance_configs_pages(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigsResponse(
+                instance_configs=[
+                    spanner_instance_admin.InstanceConfig(),
+                    spanner_instance_admin.InstanceConfig(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instance_configs(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_instance_configs_async_pager():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_configs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_configs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.InstanceConfig) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_configs_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceConfigRequest, + dict, +]) +def test_get_instance_config(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
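+        # Populate the response's scalar fields so each assertion below has a
+        # non-default value to check.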
+ call.return_value = spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, + base_config='base_config_value', + etag='etag_value', + leader_options=['leader_options_value'], + reconciling=True, + state=spanner_instance_admin.InstanceConfig.State.CREATING, + free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, + quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, + storage_limit_per_processing_unit=3540, + ) + response = client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.GetInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_instance_admin.InstanceConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED + assert response.base_config == 'base_config_value' + assert response.etag == 'etag_value' + assert response.leader_options == ['leader_options_value'] + assert response.reconciling is True + assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING + assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE + assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION + assert response.storage_limit_per_processing_unit == 3540 + + +def test_get_instance_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.GetInstanceConfigRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_instance_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceConfigRequest( + name='name_value', + ) + +def test_get_instance_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc + request = {} + client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_instance_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_instance_config] = mock_rpc + + request = {} + await client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig(
+            name='name_value',
+            display_name='display_name_value',
+            config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED,
+            base_config='base_config_value',
+            etag='etag_value',
+            leader_options=['leader_options_value'],
+            reconciling=True,
+            state=spanner_instance_admin.InstanceConfig.State.CREATING,
+            free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE,
+            quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION,
+            storage_limit_per_processing_unit=3540,
+        ))
+        response = await client.get_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.GetInstanceConfigRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, spanner_instance_admin.InstanceConfig)
+    assert response.name == 'name_value'
+    assert response.display_name == 'display_name_value'
+    assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED
+    assert response.base_config == 'base_config_value'
+    assert response.etag == 'etag_value'
+    assert response.leader_options == ['leader_options_value']
+    assert response.reconciling is True
+    assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING
+    assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE
+    assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION
+    assert response.storage_limit_per_processing_unit == 3540
+
+
+@pytest.mark.asyncio
+async def test_get_instance_config_async_from_dict():
+    await test_get_instance_config_async(request_type=dict)
+
+def test_get_instance_config_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.GetInstanceConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.InstanceConfig()
+        client.get_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_instance_config_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.GetInstanceConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig())
+        await client.get_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_get_instance_config_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.InstanceConfig()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_instance_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_instance_config_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_instance_config(
+            spanner_instance_admin.GetInstanceConfigRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_instance_config_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_instance_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_instance_config_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_instance_config( + spanner_instance_admin.GetInstanceConfigRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.CreateInstanceConfigRequest, + dict, +]) +def test_create_instance_config(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.CreateInstanceConfigRequest( + parent='parent_value', + instance_config_id='instance_config_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_instance_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstanceConfigRequest( + parent='parent_value', + instance_config_id='instance_config_id_value', + ) + +def test_create_instance_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc + request = {} + client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance_config] = mock_rpc + + request = {} + await client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_config_async_from_dict(): + await test_create_instance_config_async(request_type=dict) + +def test_create_instance_config_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = spanner_instance_admin.CreateInstanceConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_config_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceConfigRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_instance_config_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance_config( + parent='parent_value', + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + instance_config_id='instance_config_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_config + mock_val = spanner_instance_admin.InstanceConfig(name='name_value') + assert arg == mock_val + arg = args[0].instance_config_id + mock_val = 'instance_config_id_value' + assert arg == mock_val + + +def test_create_instance_config_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.create_instance_config(
+            spanner_instance_admin.CreateInstanceConfigRequest(),
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+
+@pytest.mark.asyncio
+async def test_create_instance_config_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_instance_config(
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].instance_config
+        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].instance_config_id
+        mock_val = 'instance_config_id_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_instance_config_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_instance_config(
+            spanner_instance_admin.CreateInstanceConfigRequest(),
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstanceConfigRequest,
+    dict,
+])
+def test_update_instance_config(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.update_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.UpdateInstanceConfigRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
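+    # update_instance_config is a long-running operation, so the returned
+    # Operation proto is wrapped in a google.api_core future.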
+    assert isinstance(response, future.Future)
+
+
+def test_update_instance_config_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner_instance_admin.UpdateInstanceConfigRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.update_instance_config(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == spanner_instance_admin.UpdateInstanceConfigRequest()
+
+def test_update_instance_config_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_instance_config in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc
+        request = {}
+        client.update_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        # Operation methods call wrapper_fn to build a cached
+        # client._transport.operations_client instance on first rpc call.
+        # Subsequent calls should use the cached wrapper
+        wrapper_fn.reset_mock()
+
+        client.update_instance_config(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_update_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = InstanceAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_instance_config in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_instance_config] = mock_rpc
+
+        request = {}
+        await client.update_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_config_async_from_dict(): + await test_update_instance_config_async(request_type=dict) + +def test_update_instance_config_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceConfigRequest() + + request.instance_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance_config.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_config_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceConfigRequest() + + request.instance_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'instance_config.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_instance_config_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_instance_config(
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_config
+        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_instance_config_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_instance_config(
+            spanner_instance_admin.UpdateInstanceConfigRequest(),
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_instance_config_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_instance_config(
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_config
+        mock_val = spanner_instance_admin.InstanceConfig(name='name_value')
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_instance_config_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_instance_config( + spanner_instance_admin.UpdateInstanceConfigRequest(), + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.DeleteInstanceConfigRequest, + dict, +]) +def test_delete_instance_config(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_instance_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.DeleteInstanceConfigRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.delete_instance_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstanceConfigRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_instance_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc + request = {} + client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance_config] = mock_rpc + + request = {} + await client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceConfigRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstanceConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_instance_config_async_from_dict(): + await test_delete_instance_config_async(request_type=dict) + +def test_delete_instance_config_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + call.return_value = None + client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
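+    # (mock_calls entries unpack as (name, args, kwargs); the routing header
+    # travels in kwargs['metadata'] as a (key, value) tuple.)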
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_instance_config_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.DeleteInstanceConfigRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance_config),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_instance_config(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+def test_delete_instance_config_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_instance_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_delete_instance_config_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_instance_config(
+            spanner_instance_admin.DeleteInstanceConfigRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_instance_config_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance_config),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_instance_config(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_instance_config_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_instance_config( + spanner_instance_admin.DeleteInstanceConfigRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstanceConfigOperationsRequest, + dict, +]) +def test_list_instance_config_operations(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_instance_config_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstanceConfigOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstanceConfigOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_instance_config_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstanceConfigOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_instance_config_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstanceConfigOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_instance_config_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_config_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
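+        # Swapping the entry in _wrapped_methods makes subsequent calls go
+        # through the mock rather than the real wrapped gRPC stub.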
+        client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc
+        request = {}
+        client.list_instance_config_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.list_instance_config_operations(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = InstanceAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_instance_config_operations in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_instance_config_operations] = mock_rpc
+
+        request = {}
+        await client.list_instance_config_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_instance_config_operations(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_instance_config_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstanceConfigOperationsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_async_from_dict():
+    await test_list_instance_config_operations_async(request_type=dict)
+
+def test_list_instance_config_operations_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
+        client.list_instance_config_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse())
+        await client.list_instance_config_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_instance_config_operations_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_instance_config_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_instance_config_operations_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_config_operations(
+            spanner_instance_admin.ListInstanceConfigOperationsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
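+    # (Patching __call__ on the stub's type intercepts the invocation at the
+    # channel boundary, so no network traffic is ever attempted.)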
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instance_config_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_config_operations_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instance_config_operations(
+            spanner_instance_admin.ListInstanceConfigOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_config_operations_pager(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_instance_config_operations(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                   for i in results)
+
+
+def test_list_instance_config_operations_pages(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
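+        # (With an iterable side_effect, each stub call returns the next item;
+        # the trailing RuntimeError fails fast if the pager requests more
+        # pages than were scripted.)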
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instance_config_operations(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instance_config_operations_async_pager(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_config_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_config_operations_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_config_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancesRequest, + dict, +]) +def test_list_instances(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instances_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_instances(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + filter='filter_value', + ) + +def test_list_instances_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instances in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc + + request = {} + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancesRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. 
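+        # (FakeUnaryUnaryCall wraps the response in an awaitable, so the
+        # mocked stub behaves like a real async gRPC call.)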
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        response = await client.list_instances(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstancesRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstancesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_instances_async_from_dict():
+    await test_list_instances_async(request_type=dict)
+
+def test_list_instances_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstancesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.ListInstancesResponse()
+        client.list_instances(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instances_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstancesRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
+        await client.list_instances(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_instances_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstancesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_instances(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_instances_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instances(
+            spanner_instance_admin.ListInstancesRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instances(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instances_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instances(
+            spanner_instance_admin.ListInstancesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instances_pager(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__') as call:
+        # Set the response to a series of pages.
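+        # (Pages of 3, 0, 1, and 2 instances: iterating the pager below should
+        # flatten them into six results while following each next_page_token.)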
+        call.side_effect = (
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_instances(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.Instance)
+                   for i in results)
+
+
+def test_list_instances_pages(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instances(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_instances_async_pager():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.Instance) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancesResponse( + instances=[ + spanner_instance_admin.Instance(), + spanner_instance_admin.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancePartitionsRequest, + dict, +]) +def test_list_instance_partitions(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstancePartitionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancePartitionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instance_partitions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstancePartitionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.list_instance_partitions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancePartitionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_instance_partitions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_partitions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc + request = {} + client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.list_instance_partitions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = InstanceAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.list_instance_partitions in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.list_instance_partitions] = mock_rpc
+
+        request = {}
+        await client.list_instance_partitions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.list_instance_partitions(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionsRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partitions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse(
+            next_page_token='next_page_token_value',
+            unreachable=['unreachable_value'],
+        ))
+        response = await client.list_instance_partitions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstancePartitionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstancePartitionsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_async_from_dict():
+    await test_list_instance_partitions_async(request_type=dict)
+
+def test_list_instance_partitions_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstancePartitionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
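+    # (The transport is expected to translate request.parent into the
+    # x-goog-request-params metadata entry asserted below.)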
+ with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_instance_partitions_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.ListInstancePartitionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse()) + await client.list_instance_partitions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_instance_partitions_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instance_partitions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_instance_partitions_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instance_partitions( + spanner_instance_admin.ListInstancePartitionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_instance_partitions_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. 
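+        # (A bare response object would not be awaitable; the wrapper keeps
+        # the async client's await of the stub call working.)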
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instance_partitions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_partitions_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instance_partitions(
+            spanner_instance_admin.ListInstancePartitionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_partitions_pager(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partitions),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_instance_partitions(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.InstancePartition)
+                   for i in results)
+
+
+def test_list_instance_partitions_pages(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partitions),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instance_partitions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instance_partitions_async_pager(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_partitions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, spanner_instance_admin.InstancePartition) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_partitions_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionsResponse( + instance_partitions=[ + spanner_instance_admin.InstancePartition(), + spanner_instance_admin.InstancePartition(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_partitions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceRequest, + dict, +]) +def test_get_instance(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, + endpoint_uris=['endpoint_uris_value'], + edition=spanner_instance_admin.Instance.Edition.STANDARD, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED + assert response.endpoint_uris == ['endpoint_uris_value'] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD + assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE + + +def test_get_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.GetInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstanceRequest( + name='name_value', + ) + +def test_get_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
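+        # (_prep_wrapped_messages ran once at client construction and cached
+        # one wrapped callable per RPC in _transport._wrapped_methods, so both
+        # invocations below hit the mock installed above instead of
+        # re-wrapping the method.)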
+        assert mock_rpc.call_count == 1
+
+        client.get_instance(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = InstanceAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.get_instance in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc
+
+        request = {}
+        await client.get_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.get_instance(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance(
+            name='name_value',
+            config='config_value',
+            display_name='display_name_value',
+            node_count=1070,
+            processing_units=1743,
+            state=spanner_instance_admin.Instance.State.CREATING,
+            instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED,
+            endpoint_uris=['endpoint_uris_value'],
+            edition=spanner_instance_admin.Instance.Edition.STANDARD,
+            default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE,
+        ))
+        response = await client.get_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.GetInstanceRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
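+        # FakeUnaryUnaryCall stands in for the awaitable call object a real
+        # grpc.aio stub would return, so `response` above is the unwrapped
+        # Instance message rather than the call itself.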
+ assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED + assert response.endpoint_uris == ['endpoint_uris_value'] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD + assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + +def test_get_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = spanner_instance_admin.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
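+        # The flattened `name` keyword was folded into a GetInstanceRequest
+        # by the client, so the assertions below inspect fields on args[0],
+        # the request object actually handed to the transport.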
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test_get_instance_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_instance(
+            spanner_instance_admin.GetInstanceRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_get_instance_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_instance(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_instance_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_instance(
+            spanner_instance_admin.GetInstanceRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.CreateInstanceRequest,
+    dict,
+])
+def test_create_instance(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/spam')
+        response = client.create_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.CreateInstanceRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, future.Future)
+
+
+def test_create_instance_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
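+    # Per AIP-4235 only fields annotated for auto-population (UUID4 request
+    # IDs) are filled in when unset; explicitly supplied fields such as
+    # `parent` and `instance_id` must pass through unchanged, which is what
+    # the final equality assertion verifies.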
+ request = spanner_instance_admin.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + +def test_create_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc + + request = {} + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + +def test_create_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstanceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
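+        # For reference (assuming standard google.api_core behavior),
+        # gapic_v1.routing_header.to_grpc_metadata((('parent', 'parent_value'),))
+        # yields [('x-goog-request-params', 'parent=parent_value')], which is
+        # the exact pair asserted below.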
+        _, _, kw = call.mock_calls[0]
+        assert (
+            'x-goog-request-params',
+            'parent=parent_value',
+        ) in kw['metadata']
+
+
+def test_create_instance_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_instance(
+            parent='parent_value',
+            instance_id='instance_id_value',
+            instance=spanner_instance_admin.Instance(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].instance_id
+        mock_val = 'instance_id_value'
+        assert arg == mock_val
+        arg = args[0].instance
+        mock_val = spanner_instance_admin.Instance(name='name_value')
+        assert arg == mock_val
+
+
+def test_create_instance_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_instance(
+            spanner_instance_admin.CreateInstanceRequest(),
+            parent='parent_value',
+            instance_id='instance_id_value',
+            instance=spanner_instance_admin.Instance(name='name_value'),
+        )
+
+@pytest.mark.asyncio
+async def test_create_instance_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_instance(
+            parent='parent_value',
+            instance_id='instance_id_value',
+            instance=spanner_instance_admin.Instance(name='name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+        arg = args[0].instance_id
+        mock_val = 'instance_id_value'
+        assert arg == mock_val
+        arg = args[0].instance
+        mock_val = spanner_instance_admin.Instance(name='name_value')
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_create_instance_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_instance( + spanner_instance_admin.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.UpdateInstanceRequest, + dict, +]) +def test_update_instance(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.UpdateInstanceRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstanceRequest( + ) + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc + + request = {} + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + +def test_update_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceRequest() + + request.instance.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
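+    # Patching '__call__' on type(client.transport.update_instance) swaps out
+    # the gRPC multicallable itself, so the test can inspect exactly the
+    # request and metadata that would have gone on the wire.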
+ with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstanceRequest() + + request.instance.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=name_value', + ) in kw['metadata'] + + +def test_update_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].instance + mock_val = spanner_instance_admin.Instance(name='name_value') + assert arg == mock_val + arg = args[0].field_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + spanner_instance_admin.UpdateInstanceRequest(), + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
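+        # update_instance is a long-running operation: the raw
+        # operations_pb2.Operation returned here is wrapped by the client
+        # into a future-like object before being handed back to the caller.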
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_instance(
+            instance=spanner_instance_admin.Instance(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance
+        mock_val = spanner_instance_admin.Instance(name='name_value')
+        assert arg == mock_val
+        arg = args[0].field_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_instance_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_instance(
+            spanner_instance_admin.UpdateInstanceRequest(),
+            instance=spanner_instance_admin.Instance(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.DeleteInstanceRequest,
+    dict,
+])
+def test_delete_instance(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.DeleteInstanceRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert response is None
+
+
+def test_delete_instance_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = spanner_instance_admin.DeleteInstanceRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client.delete_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstanceRequest( + name='name_value', + ) + +def test_delete_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc + + request = {} + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. 
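+        # DeleteInstance has no meaningful payload (google.protobuf.Empty on
+        # the wire), which the client surfaces as None; hence the
+        # `response is None` check below.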
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + +def test_delete_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = None + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_instance_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_instance_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.delete_instance(
+            spanner_instance_admin.DeleteInstanceRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_instance(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_instance_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_instance(
+            spanner_instance_admin.DeleteInstanceRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.SetIamPolicyRequest,
+    dict,
+])
+def test_set_iam_policy(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+        response = client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.SetIamPolicyRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, policy_pb2.Policy)
+        assert response.version == 774
+        assert response.etag == b'etag_blob'
+
+
+def test_set_iam_policy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = iam_policy_pb2.SetIamPolicyRequest(
+        resource='resource_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.set_iam_policy(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
+            resource='resource_value',
+        )
+
+def test_set_iam_policy_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.set_iam_policy in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
+        request = {}
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = InstanceAdminAsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
+
+        request = {}
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.set_iam_policy(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        response = await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
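+        # Policy comes from the vanilla protobuf google.iam.v1 modules rather
+        # than the proto-plus Spanner admin types, but the awaited response is
+        # asserted the same way: plain int and bytes fields below.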
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + +def test_set_iam_policy_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+        client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+
+def test_set_iam_policy_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.set_iam_policy(
+            resource='resource_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].resource
+        mock_val = 'resource_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.set_iam_policy(
+            iam_policy_pb2.SetIamPolicyRequest(),
+            resource='resource_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    iam_policy_pb2.GetIamPolicyRequest,
+    dict,
+])
+def test_get_iam_policy(request_type, transport: str = 'grpc'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        )
+        response = client.get_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = iam_policy_pb2.GetIamPolicyRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, policy_pb2.Policy)
+        assert response.version == 774
+        assert response.etag == b'etag_blob'
+
+
+def test_get_iam_policy_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource='resource_value', + ) + +def test_get_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc + + request = {} + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ await client.get_iam_policy(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+ version=774,
+ etag=b'etag_blob',
+ ))
+ response = await client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = iam_policy_pb2.GetIamPolicyRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy)
+ assert response.version == 774
+ assert response.etag == b'etag_blob'
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+ await test_get_iam_policy_async(request_type=dict)
+
+def test_get_iam_policy_field_headers():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+
+ request.resource = 'resource_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ call.return_value = policy_pb2.Policy()
+ client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'resource=resource_value',
+ ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_field_headers_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.GetIamPolicyRequest()
+
+ request.resource = 'resource_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+ await client.get_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
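+ # (Routing headers travel in the call's metadata kwarg rather than on
+ # the request message itself.)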
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'resource=resource_value',
+ ) in kw['metadata']
+
+def test_get_iam_policy_from_dict_foreign():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy()
+ response = client.get_iam_policy(request={
+ 'resource': 'resource_value',
+ 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
+ }
+ )
+ call.assert_called()
+
+
+def test_get_iam_policy_flattened():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = policy_pb2.Policy()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_iam_policy(
+ resource='resource_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].resource
+ mock_val = 'resource_value'
+ assert arg == mock_val
+
+
+def test_get_iam_policy_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_iam_policy(
+ iam_policy_pb2.GetIamPolicyRequest(),
+ resource='resource_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_iam_policy(
+ resource='resource_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].resource
+ mock_val = 'resource_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_iam_policy_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource='resource_value', + ) + +def test_test_iam_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.test_iam_permissions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
+
+ request = {}
+ await client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.test_iam_permissions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+ permissions=['permissions_value'],
+ ))
+ response = await client.test_iam_permissions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
+ assert response.permissions == ['permissions_value']
+
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+ await test_test_iam_permissions_async(request_type=dict)
+
+def test_test_iam_permissions_field_headers():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = iam_policy_pb2.TestIamPermissionsRequest()
+
+ request.resource = 'resource_value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
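+ # Patching __call__ on the type of the transport's bound multicallable
+ # intercepts the RPC before it would reach a real gRPC channel.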
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_test_iam_permissions_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.test_iam_permissions( + resource='resource_value', + permissions=['permissions_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + arg = args[0].permissions + mock_val = ['permissions_value'] + assert arg == mock_val + + +def test_test_iam_permissions_flattened_error(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.test_iam_permissions(
+ iam_policy_pb2.TestIamPermissionsRequest(),
+ resource='resource_value',
+ permissions=['permissions_value'],
+ )
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.test_iam_permissions),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.test_iam_permissions(
+ resource='resource_value',
+ permissions=['permissions_value'],
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].resource
+ mock_val = 'resource_value'
+ assert arg == mock_val
+ arg = args[0].permissions
+ mock_val = ['permissions_value']
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_test_iam_permissions_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.test_iam_permissions(
+ iam_policy_pb2.TestIamPermissionsRequest(),
+ resource='resource_value',
+ permissions=['permissions_value'],
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.GetInstancePartitionRequest,
+ dict,
+])
+def test_get_instance_partition(request_type, transport: str = 'grpc'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = spanner_instance_admin.InstancePartition(
+ name='name_value',
+ config='config_value',
+ display_name='display_name_value',
+ state=spanner_instance_admin.InstancePartition.State.CREATING,
+ referencing_databases=['referencing_databases_value'],
+ referencing_backups=['referencing_backups_value'],
+ etag='etag_value',
+ node_count=1070,
+ )
+ response = client.get_instance_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.GetInstancePartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
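+ # node_count lives in the compute_capacity oneof, so it is set on the
+ # fake response above but not covered by the assertions below.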
+ assert isinstance(response, spanner_instance_admin.InstancePartition) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.state == spanner_instance_admin.InstancePartition.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.referencing_backups == ['referencing_backups_value'] + assert response.etag == 'etag_value' + + +def test_get_instance_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.GetInstancePartitionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.GetInstancePartitionRequest( + name='name_value', + ) + +def test_get_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc + request = {} + client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1
+
+ client.get_instance_partition(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._client._transport.get_instance_partition in client._client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[client._client._transport.get_instance_partition] = mock_rpc
+
+ request = {}
+ await client.get_instance_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_instance_partition(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstancePartitionRequest):
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition(
+ name='name_value',
+ config='config_value',
+ display_name='display_name_value',
+ state=spanner_instance_admin.InstancePartition.State.CREATING,
+ referencing_databases=['referencing_databases_value'],
+ referencing_backups=['referencing_backups_value'],
+ etag='etag_value',
+ ))
+ response = await client.get_instance_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.GetInstancePartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, spanner_instance_admin.InstancePartition) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.state == spanner_instance_admin.InstancePartition.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.referencing_backups == ['referencing_backups_value'] + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_instance_partition_async_from_dict(): + await test_get_instance_partition_async(request_type=dict) + +def test_get_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value = spanner_instance_admin.InstancePartition() + client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.GetInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition()) + await client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_instance_partition_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.InstancePartition() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance_partition( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_get_instance_partition_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_instance_partition(
+ spanner_instance_admin.GetInstancePartitionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_instance_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_get_instance_partition_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_instance_partition(
+ spanner_instance_admin.GetInstancePartitionRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.CreateInstancePartitionRequest,
+ dict,
+])
+def test_create_instance_partition(request_type, transport: str = 'grpc'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.create_instance_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.CreateInstancePartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_create_instance_partition_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.CreateInstancePartitionRequest( + parent='parent_value', + instance_partition_id='instance_partition_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.CreateInstancePartitionRequest( + parent='parent_value', + instance_partition_id='instance_partition_id_value', + ) + +def test_create_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc + request = {} + client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance_partition] = mock_rpc + + request = {} + await client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstancePartitionRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.CreateInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_partition_async_from_dict(): + await test_create_instance_partition_async(request_type=dict) + +def test_create_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstancePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.CreateInstancePartitionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent_value',
+ ) in kw['metadata']
+
+
+def test_create_instance_partition_flattened():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/op')
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_instance_partition(
+ parent='parent_value',
+ instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+ instance_partition_id='instance_partition_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = 'parent_value'
+ assert arg == mock_val
+ arg = args[0].instance_partition
+ mock_val = spanner_instance_admin.InstancePartition(name='name_value')
+ assert arg == mock_val
+ arg = args[0].instance_partition_id
+ mock_val = 'instance_partition_id_value'
+ assert arg == mock_val
+
+
+def test_create_instance_partition_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_instance_partition(
+ spanner_instance_admin.CreateInstancePartitionRequest(),
+ parent='parent_value',
+ instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+ instance_partition_id='instance_partition_id_value',
+ )
+
+@pytest.mark.asyncio
+async def test_create_instance_partition_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name='operations/spam')
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_instance_partition(
+ parent='parent_value',
+ instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+ instance_partition_id='instance_partition_id_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_partition + mock_val = spanner_instance_admin.InstancePartition(name='name_value') + assert arg == mock_val + arg = args[0].instance_partition_id + mock_val = 'instance_partition_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_instance_partition_flattened_error_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance_partition( + spanner_instance_admin.CreateInstancePartitionRequest(), + parent='parent_value', + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + instance_partition_id='instance_partition_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.DeleteInstancePartitionRequest, + dict, +]) +def test_delete_instance_partition(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_instance_partition_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.DeleteInstancePartitionRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.DeleteInstancePartitionRequest( + name='name_value', + etag='etag_value', + ) + +def test_delete_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc + request = {} + client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance_partition] = mock_rpc + + request = {} + await client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstancePartitionRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. 
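+ # DeleteInstancePartition maps to google.protobuf.Empty, so the awaited
+ # fake call resolves to None.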
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.DeleteInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_instance_partition_async_from_dict(): + await test_delete_instance_partition_async(request_type=dict) + +def test_delete_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value = None + client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.DeleteInstancePartitionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_instance_partition_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance_partition( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+
+def test_delete_instance_partition_flattened_error():
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_instance_partition(
+ spanner_instance_admin.DeleteInstancePartitionRequest(),
+ name='name_value',
+ )
+
+@pytest.mark.asyncio
+async def test_delete_instance_partition_flattened_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_instance_partition(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = 'name_value'
+ assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_delete_instance_partition_flattened_error_async():
+ client = InstanceAdminAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_instance_partition(
+ spanner_instance_admin.DeleteInstancePartitionRequest(),
+ name='name_value',
+ )
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.UpdateInstancePartitionRequest,
+ dict,
+])
+def test_update_instance_partition(request_type, transport: str = 'grpc'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_instance_partition),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name='operations/spam')
+ response = client.update_instance_partition(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = spanner_instance_admin.UpdateInstancePartitionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_update_instance_partition_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
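+ # UpdateInstancePartitionRequest exposes no top-level string fields (its
+ # members are the instance_partition message and a field_mask), so the
+ # request below is constructed empty.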
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.UpdateInstancePartitionRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.update_instance_partition(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.UpdateInstancePartitionRequest( + ) + +def test_update_instance_partition_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc + request = {} + client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_instance_partition in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_instance_partition] = mock_rpc + + request = {} + await client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstancePartitionRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.UpdateInstancePartitionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_partition_async_from_dict(): + await test_update_instance_partition_async(request_type=dict) + +def test_update_instance_partition_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstancePartitionRequest() + + request.instance_partition.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance_partition.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_partition_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.UpdateInstancePartitionRequest() + + request.instance_partition.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'instance_partition.name=name_value',
+    ) in kw['metadata']
+
+
+def test_update_instance_partition_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name='operations/op')
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_instance_partition(
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_partition
+        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
+        assert arg == mock_val
+        arg = args[0].field_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+
+def test_update_instance_partition_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_instance_partition(
+            spanner_instance_admin.UpdateInstancePartitionRequest(),
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+@pytest.mark.asyncio
+async def test_update_instance_partition_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call, wrapped so the
+        # mocked async stub returns an awaitable.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_instance_partition(
+            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
+            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].instance_partition
+        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
+        assert arg == mock_val
+        arg = args[0].field_mask
+        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_update_instance_partition_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.update_instance_partition( + spanner_instance_admin.UpdateInstancePartitionRequest(), + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancePartitionOperationsRequest, + dict, +]) +def test_list_instance_partition_operations(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse( + next_page_token='next_page_token_value', + unreachable_instance_partitions=['unreachable_instance_partitions_value'], + ) + response = client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.ListInstancePartitionOperationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancePartitionOperationsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value'] + + +def test_list_instance_partition_operations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.ListInstancePartitionOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_instance_partition_operations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.ListInstancePartitionOperationsRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +def test_list_instance_partition_operations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc + request = {} + client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instance_partition_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_partition_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instance_partition_operations in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instance_partition_operations] = mock_rpc + + request = {} + await client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instance_partition_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instance_partition_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
+        ))
+        response = await client.list_instance_partition_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListInstancePartitionOperationsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_async_from_dict():
+    await test_list_instance_partition_operations_async(request_type=dict)
+
+def test_list_instance_partition_operations_field_headers():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
+        client.list_instance_partition_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_field_headers_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
+        await client.list_instance_partition_operations(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+def test_list_instance_partition_operations_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_instance_partition_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_instance_partition_operations_flattened_error():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_partition_operations(
+            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_flattened_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Designate an appropriate return value for the call, wrapped so the
+        # mocked async stub returns an awaitable.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_instance_partition_operations(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_flattened_error_async():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_instance_partition_operations(
+            spanner_instance_admin.ListInstancePartitionOperationsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_partition_operations_pager(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
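+        # Each element of side_effect is consumed by one successive stub call,
+        # so the pager walks four pages; the trailing RuntimeError would only
+        # be raised if an unexpected extra page were requested.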
+        call.side_effect = (
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_instance_partition_operations(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                   for i in results)
+
+
+def test_list_instance_partition_operations_pages(transport_name: str = "grpc"):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_instance_partition_operations(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_instance_partition_operations_async_pager():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partition_operations),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instance_partition_operations(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instance_partition_operations_async_pages(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstancePartitionOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instance_partition_operations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.MoveInstanceRequest, + dict, +]) +def test_move_instance(request_type, transport: str = 'grpc'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.MoveInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_move_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = spanner_instance_admin.MoveInstanceRequest( + name='name_value', + target_config='target_config_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.move_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == spanner_instance_admin.MoveInstanceRequest( + name='name_value', + target_config='target_config_value', + ) + +def test_move_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc + request = {} + client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_move_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.move_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.move_instance] = mock_rpc + + request = {} + await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_move_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.MoveInstanceRequest): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = spanner_instance_admin.MoveInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_move_instance_async_from_dict(): + await test_move_instance_async(request_type=dict) + +def test_move_instance_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.MoveInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_move_instance_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = spanner_instance_admin.MoveInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_list_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_configs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc + + request = {} + client.list_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instance_configs_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
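+            # (path_template.transcode normally maps the request onto the REST
+            # URI, HTTP verb, and query string per the method's http_options;
+            # stubbing it lets the test supply a canned mapping instead.)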
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_instance_configs(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_instance_configs_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.list_instance_configs._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_instance_configs_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstanceConfigsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_instance_configs(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1])
+
+
+def test_list_instance_configs_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instance_configs(
+            spanner_instance_admin.ListInstanceConfigsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instance_configs_rest_pager(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='abc', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[], + next_page_token='def', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + ], + next_page_token='ghi', + ), + spanner_instance_admin.ListInstanceConfigsResponse( + instance_configs=[ + spanner_instance_admin.InstanceConfig(), + spanner_instance_admin.InstanceConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(spanner_instance_admin.ListInstanceConfigsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_instance_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, spanner_instance_admin.InstanceConfig) + for i in results) + + pages = list(client.list_instance_configs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_instance_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc + + request = {} + client.get_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_instance_config(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_instance_config_rest_required_fields(request_type=spanner_instance_admin.GetInstanceConfigRequest):
+    transport_class = transports.InstanceAdminRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = spanner_instance_admin.InstanceConfig()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.InstanceConfig.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_instance_config(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_instance_config_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.get_instance_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+def test_get_instance_config_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
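+    # (client.transport._session is the requests Session used by the REST
+    # transport, so patching its 'request' method intercepts the outgoing
+    # HTTP call.)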
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstanceConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_instance_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) + + +def test_get_instance_config_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance_config( + spanner_instance_admin.GetInstanceConfigRequest(), + name='name_value', + ) + + +def test_create_instance_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc + + request = {} + client.create_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_config_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceConfigRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceConfigId"] = 'instance_config_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceConfigId" in jsonified_request + assert jsonified_request["instanceConfigId"] == 'instance_config_id_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
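+            # (this method maps to an HTTP POST, so the request proto is also
+            # attached as the body of the transcoded request below.)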
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.create_instance_config(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_instance_config_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_instance_config._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "instanceConfigId", "instanceConfig", )))
+
+
+def test_create_instance_config_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+            instance_config=spanner_instance_admin.InstanceConfig(name='name_value'),
+            instance_config_id='instance_config_id_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.create_instance_config(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1])
+
+
+def test_create_instance_config_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + client.create_instance_config( + spanner_instance_admin.CreateInstanceConfigRequest(), + parent='parent_value', + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + instance_config_id='instance_config_id_value', + ) + + +def test_update_instance_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc + + request = {} + client.update_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_config_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceConfigRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_instance_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_config_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instanceConfig", "updateMask", ))) + + +def test_update_instance_config_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}} + + # get truthy value for each flattened field + mock_args = dict( + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_instance_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{instance_config.name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) + + +def test_update_instance_config_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance_config( + spanner_instance_admin.UpdateInstanceConfigRequest(), + instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_instance_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc + + request = {} + client.delete_instance_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_config_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceConfigRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
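+            # DeleteInstanceConfig maps to HTTP DELETE with no request body,
+            # so, unlike the create/update stubs above, no 'body' key is set
+            # on the transcode result and the faked response content is empty.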
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_instance_config(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_config_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) + + +def test_delete_instance_config_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_instance_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) + + +def test_delete_instance_config_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance_config( + spanner_instance_admin.DeleteInstanceConfigRequest(), + name='name_value', + ) + + +def test_list_instance_config_operations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_config_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
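+        # Swapping the cached wrapped method for a bare mock.Mock() lets the
+        # test count invocations without any HTTP round trip; the client
+        # resolves the callable through _transport._wrapped_methods per call.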
+ client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc + + request = {} + client.list_instance_config_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instance_config_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instance_config_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
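+            # ListInstanceConfigOperations maps to HTTP GET: there is no
+            # request body, so with transcode() stubbed every request field
+            # surfaces as a query parameter alongside the
+            # '$alt=json;enum-encoding=int' parameter asserted below.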
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_instance_config_operations(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instance_config_operations_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instance_config_operations._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_instance_config_operations_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_instance_config_operations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigOperations" % client.transport._host, args[1]) + + +def test_list_instance_config_operations_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instance_config_operations( + spanner_instance_admin.ListInstanceConfigOperationsRequest(), + parent='parent_value', + ) + + +def test_list_instance_config_operations_rest_pager(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
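+    # The pager keeps fetching while next_page_token is non-empty, so the four
+    # canned pages below (3 + 0 + 1 + 2 items) yield six results in total; the
+    # series is queued twice because the test iterates once via the pager and
+    # once via .pages.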
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstanceConfigOperationsResponse(
+                operations=[
+                    operations_pb2.Operation(),
+                    operations_pb2.Operation(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1'}
+
+        pager = client.list_instance_config_operations(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, operations_pb2.Operation)
+                   for i in results)
+
+        pages = list(client.list_instance_config_operations(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_list_instances_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_instances in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc
+
+        request = {}
+        client.list_instances(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instances_rest_required_fields(request_type=spanner_instance_admin.ListInstancesRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "instance_deadline", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_instances(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_instances_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_instances._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("filter", "instanceDeadline", "pageSize", "pageToken", )) & set(("parent", )))
+
+
+def test_list_instances_rest_flattened():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstancesResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'projects/sample1'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_instances(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1])
+
+
+def test_list_instances_rest_flattened_error(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_instances(
+            spanner_instance_admin.ListInstancesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_instances_rest_pager(transport: str = 'rest'):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancesResponse(
+                instances=[
+                    spanner_instance_admin.Instance(),
+                    spanner_instance_admin.Instance(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstancesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1'}
+
+        pager = client.list_instances(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.Instance)
+                   for i in results)
+
+        pages = list(client.list_instances(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_list_instance_partitions_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.list_instance_partitions in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc
+
+        request = {}
+        client.list_instance_partitions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.list_instance_partitions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instance_partitions_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("instance_partition_deadline", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_instance_partitions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instance_partitions_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instance_partitions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_instance_partitions_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_instance_partitions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1]) + + +def test_list_instance_partitions_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instance_partitions( + spanner_instance_admin.ListInstancePartitionsRequest(), + parent='parent_value', + ) + + +def test_list_instance_partitions_rest_pager(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                ],
+                next_page_token='abc',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[],
+                next_page_token='def',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                ],
+                next_page_token='ghi',
+            ),
+            spanner_instance_admin.ListInstancePartitionsResponse(
+                instance_partitions=[
+                    spanner_instance_admin.InstancePartition(),
+                    spanner_instance_admin.InstancePartition(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(spanner_instance_admin.ListInstancePartitionsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+        pager = client.list_instance_partitions(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, spanner_instance_admin.InstancePartition)
+                   for i in results)
+
+        pages = list(client.list_instance_partitions(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_get_instance_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.get_instance in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc
+
+        request = {}
+        client.get_instance(request)
+
+        # Establish that the underlying gRPC stub method was called.
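+        # (get_instance is a plain unary method, so unlike the operation
+        # methods above no extra wrapper is built lazily on first call; the
+        # second call should reuse the same cached rpc.)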
+ assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_rest_required_fields(request_type=spanner_instance_admin.GetInstanceRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("field_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("fieldMask", )) & set(("name", ))) + + +def test_get_instance_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
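+    # The flattened-call tests patch the transport's requests.Session directly
+    # and then use path_template.validate() to confirm the flattened arguments
+    # were expanded into the expected REST URI.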
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1]) + + +def test_get_instance_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + spanner_instance_admin.GetInstanceRequest(), + name='name_value', + ) + + +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceId"] = 'instance_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == 'instance_id_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "instanceId", "instance", ))) + + +def test_create_instance_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1]) + + +def test_create_instance_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + spanner_instance_admin.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=spanner_instance_admin.Instance(name='name_value'), + ) + + +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
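+            # UpdateInstance maps to HTTP PATCH, so the stubbed transcode
+            # result below includes a 'body' entry; its required message-typed
+            # fields ('instance', 'fieldMask') are asserted separately in
+            # test_update_instance_rest_unset_required_fields.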
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instance", "fieldMask", ))) + + +def test_update_instance_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance': {'name': 'projects/sample1/instances/sample2'}} + + # get truthy value for each flattened field + mock_args = dict( + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{instance.name=projects/*/instances/*}" % client.transport._host, args[1]) + + +def test_update_instance_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + spanner_instance_admin.UpdateInstanceRequest(), + instance=spanner_instance_admin.Instance(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_instance(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_instance_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1]) + + +def test_delete_instance_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + spanner_instance_admin.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
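+ # The cache is keyed by the bound transport method itself, so swapping
+ # the entry below reroutes every subsequent client call without
+ # re-wrapping. Internally the generated client does, in essence (sketch):
+ #
+ #   rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+ #   response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)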
+ client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) + + +def test_set_iam_policy_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
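+ # Patching on type(client.transport._session) swaps the method on the
+ # class (requests.Session here), so the session instance the transport
+ # already holds resolves 'request' to the mock and no real HTTP leaves
+ # the process. Equivalent sketch, assuming a requests-backed transport:
+ #
+ #   with mock.patch.object(Session, 'request') as req:
+ #       req.return_value = fake_response  # e.g. a prepared Response()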
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:setIamPolicy" % client.transport._host, args[1]) + + +def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", ))) + + +def test_get_iam_policy_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:getIamPolicy" % client.transport._host, args[1]) + + +def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.test_iam_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = 'resource_value' + jsonified_request["permissions"] = 'permissions_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == 'permissions_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) + + +def test_test_iam_permissions_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'resource': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + resource='resource_value', + permissions=['permissions_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:testIamPermissions" % client.transport._host, args[1]) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + iam_policy_pb2.TestIamPermissionsRequest(), + resource='resource_value', + permissions=['permissions_value'], + ) + + +def test_get_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc + + request = {} + client.get_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_partition_rest_required_fields(request_type=spanner_instance_admin.GetInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstancePartition() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstancePartition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstancePartition() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstancePartition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) + + +def test_get_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance_partition( + spanner_instance_admin.GetInstancePartitionRequest(), + name='name_value', + ) + + +def test_create_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc + + request = {} + client.create_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_partition_rest_required_fields(request_type=spanner_instance_admin.CreateInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_partition_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instancePartitionId"] = 'instance_partition_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instancePartitionId" in jsonified_request + assert jsonified_request["instancePartitionId"] == 'instance_partition_id_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "instancePartitionId", "instancePartition", ))) + + +def test_create_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + instance_partition_id='instance_partition_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1]) + + +def test_create_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance_partition( + spanner_instance_admin.CreateInstancePartitionRequest(), + parent='parent_value', + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + instance_partition_id='instance_partition_id_value', + ) + + +def test_delete_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc + + request = {} + client.delete_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_partition_rest_required_fields(request_type=spanner_instance_admin.DeleteInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag", )) & set(("name", ))) + + +def test_delete_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) + + +def test_delete_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_instance_partition( + spanner_instance_admin.DeleteInstancePartitionRequest(), + name='name_value', + ) + + +def test_update_instance_partition_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance_partition in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc + + request = {} + client.update_instance_partition(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance_partition(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_partition_rest_required_fields(request_type=spanner_instance_admin.UpdateInstancePartitionRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_instance_partition(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_partition_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance_partition._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("instancePartition", "fieldMask", ))) + + +def test_update_instance_partition_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_instance_partition(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) + + +def test_update_instance_partition_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance_partition( + spanner_instance_admin.UpdateInstancePartitionRequest(), + instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), + field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_list_instance_partition_operations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc + + request = {} + client.list_instance_partition_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instance_partition_operations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instance_partition_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "instance_partition_deadline", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_instance_partition_operations(request) + + expected_params = [ + ('$alt', 'json;enum-encoding=int') + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instance_partition_operations_rest_unset_required_fields(): + transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instance_partition_operations._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_instance_partition_operations_rest_flattened(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/instances/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_instance_partition_operations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitionOperations" % client.transport._host, args[1]) + + +def test_list_instance_partition_operations_rest_flattened_error(transport: str = 'rest'): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_instance_partition_operations(
+ spanner_instance_admin.ListInstancePartitionOperationsRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_instance_partition_operations_rest_pager(transport: str = 'rest'):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req:
+ # Set the response as a series of pages
+ response = (
+ spanner_instance_admin.ListInstancePartitionOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token='abc',
+ ),
+ spanner_instance_admin.ListInstancePartitionOperationsResponse(
+ operations=[],
+ next_page_token='def',
+ ),
+ spanner_instance_admin.ListInstancePartitionOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ ],
+ next_page_token='ghi',
+ ),
+ spanner_instance_admin.ListInstancePartitionOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(x) for x in response)
+ return_values = tuple(Response() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val._content = response_val.encode('UTF-8')
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ sample_request = {'parent': 'projects/sample1/instances/sample2'}
+
+ pager = client.list_instance_partition_operations(request=sample_request)
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation)
+ for i in results)
+
+ pages = list(client.list_instance_partition_operations(request=sample_request).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_move_instance_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.move_instance in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc
+
+ request = {}
+ client.move_instance(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_move_instance_rest_required_fields(request_type=spanner_instance_admin.MoveInstanceRequest): + transport_class = transports.InstanceAdminRestTransport + + request_init = {} + request_init["name"] = "" + request_init["target_config"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["targetConfig"] = 'target_config_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "targetConfig" in jsonified_request + assert jsonified_request["targetConfig"] == 'target_config_value' + + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.move_instance(request)
+
+            expected_params = [
+                ('$alt', 'json;enum-encoding=int')
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_move_instance_rest_unset_required_fields():
+    transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.move_instance._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", "targetConfig", )))
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and credentials.
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = InstanceAdminClient(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.InstanceAdminGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = InstanceAdminClient(transport=transport)
+    assert client.transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
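+    # Both the sync and asyncio gRPC transports expose the underlying channel
+    # via `.grpc_channel`; a caller could, for example, wrap it with
+    # channel-level interceptors (a sketch, assuming a user-supplied
+    # `MyInterceptor`):
+    #   channel = grpc.intercept_channel(transport.grpc_channel, MyInterceptor())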
+ transport = transports.InstanceAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.InstanceAdminGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, + transports.InstanceAdminRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = InstanceAdminClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_configs_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() + client.list_instance_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + call.return_value = spanner_instance_admin.InstanceConfig() + client.get_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_config(request=None) + + # Establish that the underlying stub method was called. 
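+        # Each entry in mock.mock_calls is a (name, args, kwargs) triple, so
+        # the unpacking below reads the positional args of the first stub
+        # invocation; args[0] is the request proto the client built from
+        # request=None.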
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_config_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + call.return_value = None + client.delete_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_config_operations_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() + client.list_instance_config_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancesResponse() + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_instance_partitions_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + client.list_instance_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = spanner_instance_admin.Instance() + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = None + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + call.return_value = spanner_instance_admin.InstancePartition() + client.get_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
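+    # Patching '__call__' on type(...) is deliberate: Python looks up special
+    # methods on the type, so patching the attribute on the multicallable
+    # instance itself would not intercept the invocation. (Explanatory note;
+    # the transport property returns a cached multicallable.)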
+ with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + call.return_value = None + client.delete_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_partition_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_partition_operations_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() + client.list_instance_partition_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_move_instance_empty_call_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.move_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.MoveInstanceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = InstanceAdminAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_configs_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse( + next_page_token='next_page_token_value', + )) + await client.list_instance_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, + base_config='base_config_value', + etag='etag_value', + leader_options=['leader_options_value'], + reconciling=True, + state=spanner_instance_admin.InstanceConfig.State.CREATING, + free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, + quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, + storage_limit_per_processing_unit=3540, + )) + await client.get_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. 
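+        # grpc_helpers_async.FakeUnaryUnaryCall makes the mocked stub
+        # awaitable: awaiting it simply yields the wrapped message, which is
+        # what the async client expects from a unary-unary call.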
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_config_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_config_operations_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_config_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse( + next_page_token='next_page_token_value', + )) + await client.list_instance_config_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_instances_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_partitions_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partitions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_instance_partitions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, + endpoint_uris=['endpoint_uris_value'], + edition=spanner_instance_admin.Instance.Edition.STANDARD, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, + )) + await client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio
+async def test_create_instance_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        await client.create_instance(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.CreateInstanceRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_instance_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name='operations/spam')
+        )
+        await client.update_instance(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.UpdateInstanceRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_instance_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_instance(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.DeleteInstanceRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_set_iam_policy_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.set_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        await client.set_iam_policy(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = iam_policy_pb2.SetIamPolicyRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_iam_policy_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_iam_policy),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
+            version=774,
+            etag=b'etag_blob',
+        ))
+        await client.get_iam_policy(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = iam_policy_pb2.GetIamPolicyRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_test_iam_permissions_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.test_iam_permissions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
+            permissions=['permissions_value'],
+        ))
+        await client.test_iam_permissions(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = iam_policy_pb2.TestIamPermissionsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_instance_partition_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition(
+            name='name_value',
+            config='config_value',
+            display_name='display_name_value',
+            state=spanner_instance_admin.InstancePartition.State.CREATING,
+            referencing_databases=['referencing_databases_value'],
+            referencing_backups=['referencing_backups_value'],
+            etag='etag_value',
+        ))
+        await client.get_instance_partition(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.GetInstancePartitionRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_instance_partition_empty_call_grpc_asyncio():
+    client = InstanceAdminAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance_partition),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
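+        # For LRO methods the stub returns a raw longrunning Operation proto;
+        # the async client wraps it in an api_core AsyncOperation future, so a
+        # caller would typically write (sketch):
+        #   op = await client.create_instance_partition(request=...)
+        #   result = await op.result()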
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_partition_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_partition_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instance_partition_operations_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse( + next_page_token='next_page_token_value', + unreachable_instance_partitions=['unreachable_instance_partitions_value'], + )) + await client.list_instance_partition_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_move_instance_empty_call_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.move_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.MoveInstanceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = InstanceAdminClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_instance_configs_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instance_configs(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstanceConfigsRequest, + dict, +]) +def test_list_instance_configs_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instance_configs(request) + + # Establish that the response is the type that we expect. 
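+    # (The pager lazily re-issues the request with each next_page_token;
+    # iterating it, e.g. `list(response)`, would drive further HTTP calls
+    # through the mocked session -- usage sketch, not exercised here.)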
+ assert isinstance(response, pagers.ListInstanceConfigsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instance_configs_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_configs") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.ListInstanceConfigsRequest.pb(spanner_instance_admin.ListInstanceConfigsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.ListInstanceConfigsResponse.to_json(spanner_instance_admin.ListInstanceConfigsResponse()) + req.return_value.content = return_value + + request = spanner_instance_admin.ListInstanceConfigsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.ListInstanceConfigsResponse() + post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigsResponse(), metadata + + client.list_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_config_rest_bad_request(request_type=spanner_instance_admin.GetInstanceConfigRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
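+    # api_core translates HTTP status codes into typed exceptions (a 400
+    # response surfaces as core_exceptions.BadRequest), so the test asserts on
+    # the exception type rather than inspecting the raw response.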
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance_config(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceConfigRequest, + dict, +]) +def test_get_instance_config_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstanceConfig( + name='name_value', + display_name='display_name_value', + config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, + base_config='base_config_value', + etag='etag_value', + leader_options=['leader_options_value'], + reconciling=True, + state=spanner_instance_admin.InstanceConfig.State.CREATING, + free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, + quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, + storage_limit_per_processing_unit=3540, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstanceConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, spanner_instance_admin.InstanceConfig) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED + assert response.base_config == 'base_config_value' + assert response.etag == 'etag_value' + assert response.leader_options == ['leader_options_value'] + assert response.reconciling is True + assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING + assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE + assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION + assert response.storage_limit_per_processing_unit == 3540 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_config_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_config") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.GetInstanceConfigRequest.pb(spanner_instance_admin.GetInstanceConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.InstanceConfig.to_json(spanner_instance_admin.InstanceConfig()) + req.return_value.content = return_value + + request = spanner_instance_admin.GetInstanceConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.InstanceConfig() + post_with_metadata.return_value = spanner_instance_admin.InstanceConfig(), metadata + + client.get_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_instance_config_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceConfigRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.create_instance_config(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.CreateInstanceConfigRequest,
+    dict,
+])
+def test_create_instance_config_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.create_instance_config(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operation.Operation)
+    assert response.operation.name == 'operations/spam'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_instance_config_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+    )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config") as post, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_config") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.CreateInstanceConfigRequest.pb(spanner_instance_admin.CreateInstanceConfigRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.CreateInstanceConfigRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.create_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_update_instance_config_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceConfigRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_instance_config(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstanceConfigRequest,
+    dict,
+])
+def test_update_instance_config_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_instance_config(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_instance_config_rest_interceptors(null_interceptor):
+ transport = transports.InstanceAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+ )
+ client = InstanceAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config") as post, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_config") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_instance_admin.UpdateInstanceConfigRequest.pb(spanner_instance_admin.UpdateInstanceConfigRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = spanner_instance_admin.UpdateInstanceConfigRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.update_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_delete_instance_config_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceConfigRequest):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instanceConfigs/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.delete_instance_config(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.DeleteInstanceConfigRequest,
+ dict,
+])
+def test_delete_instance_config_rest_call_success(request_type):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instanceConfigs/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
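+ # (DeleteInstanceConfig maps to protobuf Empty, so the fake below returns a
+ # 200 with an empty body and the client surfaces None. A minimal equivalent
+ # fake, assuming the requests API, would be
+ # mock.Mock(status_code=200, content=b'', headers={}).)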
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_instance_config(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_config_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_config") as pre: + pre.assert_not_called() + pb_message = spanner_instance_admin.DeleteInstanceConfigRequest.pb(spanner_instance_admin.DeleteInstanceConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner_instance_admin.DeleteInstanceConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_list_instance_config_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instance_config_operations(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstanceConfigOperationsRequest, + dict, +]) +def test_list_instance_config_operations_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
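+ # (The list responses below are proto-plus messages, hence the .pb() +
+ # MessageToJson round trip when faking the body. A minimal usage sketch:
+ # iterating the returned pager, e.g.
+ #     for op in client.list_instance_config_operations(request): ...
+ # would lazily issue one HTTP call per page, keyed on next_page_token.)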
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instance_config_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstanceConfigOperationsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instance_config_operations_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_config_operations") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(spanner_instance_admin.ListInstanceConfigOperationsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(spanner_instance_admin.ListInstanceConfigOperationsResponse()) + req.return_value.content = return_value + + request = spanner_instance_admin.ListInstanceConfigOperationsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() + post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse(), metadata + + client.list_instance_config_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_instances_rest_bad_request(request_type=spanner_instance_admin.ListInstancesRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instances(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancesRequest, + dict, +]) +def test_list_instances_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.ListInstancesRequest.pb(spanner_instance_admin.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.ListInstancesResponse.to_json(spanner_instance_admin.ListInstancesResponse()) + req.return_value.content = return_value + + request = spanner_instance_admin.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.ListInstancesResponse() + post_with_metadata.return_value = spanner_instance_admin.ListInstancesResponse(), metadata + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_instance_partitions_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
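+ # (A sketch of the assumption behind this pattern: google.api_core routes a
+ # 400 response through exceptions.from_http_response, which is why these
+ # tests can assert core_exceptions.BadRequest instead of a raw
+ # requests.HTTPError.)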
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instance_partitions(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.ListInstancePartitionsRequest, + dict, +]) +def test_list_instance_partitions_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.ListInstancePartitionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instance_partitions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancePartitionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instance_partitions_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partitions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.ListInstancePartitionsRequest.pb(spanner_instance_admin.ListInstancePartitionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.ListInstancePartitionsResponse.to_json(spanner_instance_admin.ListInstancePartitionsResponse()) + req.return_value.content = return_value + + request = spanner_instance_admin.ListInstancePartitionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.ListInstancePartitionsResponse() + post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionsResponse(), metadata + + client.list_instance_partitions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=spanner_instance_admin.GetInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstanceRequest, + dict, +]) +def test_get_instance_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.Instance( + name='name_value', + config='config_value', + display_name='display_name_value', + node_count=1070, + processing_units=1743, + state=spanner_instance_admin.Instance.State.CREATING, + instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, + endpoint_uris=['endpoint_uris_value'], + edition=spanner_instance_admin.Instance.Edition.STANDARD, + default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
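+ # (Enum fields such as state and edition serialize to string names via
+ # MessageToJson, but the transport re-parses them into typed members, so
+ # the assertions below can compare proto-plus enum values directly.)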
+ assert isinstance(response, spanner_instance_admin.Instance) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.node_count == 1070 + assert response.processing_units == 1743 + assert response.state == spanner_instance_admin.Instance.State.CREATING + assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED + assert response.endpoint_uris == ['endpoint_uris_value'] + assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD + assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.GetInstanceRequest.pb(spanner_instance_admin.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.Instance.to_json(spanner_instance_admin.Instance()) + req.return_value.content = return_value + + request = spanner_instance_admin.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.Instance() + post_with_metadata.return_value = spanner_instance_admin.Instance(), metadata + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_instance_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.create_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.CreateInstanceRequest,
+ dict,
+])
+def test_create_instance_rest_call_success(request_type):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'parent': 'projects/sample1'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(name='operations/spam')
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.content = json_return_value.encode('UTF-8')
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ response = client.create_instance(request)
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_instance_rest_interceptors(null_interceptor):
+ transport = transports.InstanceAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+ )
+ client = InstanceAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance") as post, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_instance_admin.CreateInstanceRequest.pb(spanner_instance_admin.CreateInstanceRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = spanner_instance_admin.CreateInstanceRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ 
pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_instance_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_instance(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.UpdateInstanceRequest, + dict, +]) +def test_update_instance_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_instance_rest_interceptors(null_interceptor):
+ transport = transports.InstanceAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+ )
+ client = InstanceAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance") as post, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_instance_admin.UpdateInstanceRequest.pb(spanner_instance_admin.UpdateInstanceRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = spanner_instance_admin.UpdateInstanceRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_delete_instance_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceRequest):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ json_return_value = ''
+ response_value.json = mock.Mock(return_value={})
+ response_value.status_code = 400
+ response_value.request = mock.Mock()
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ client.delete_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+ spanner_instance_admin.DeleteInstanceRequest,
+ dict,
+])
+def test_delete_instance_rest_call_success(request_type):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + pb_message = spanner_instance_admin.DeleteInstanceRequest.pb(spanner_instance_admin.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = spanner_instance_admin.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
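+ # (Unlike the proto-plus admin messages above, Policy comes from the raw
+ # protobuf module policy_pb2, so no .pb() conversion is needed before
+ # json_format.MessageToJson in this test.)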
+ return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_set_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.SetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value.content = return_value + + request = iam_policy_pb2.SetIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata + + client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.GetIamPolicyRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) + req.return_value.content = return_value + + request = iam_policy_pb2.GetIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata + + client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_test_iam_permissions") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = iam_policy_pb2.TestIamPermissionsRequest() + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) + req.return_value.content = return_value + + request = iam_policy_pb2.TestIamPermissionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata + + client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_partition_rest_bad_request(request_type=spanner_instance_admin.GetInstancePartitionRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance_partition(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.GetInstancePartitionRequest, + dict, +]) +def test_get_instance_partition_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
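+ # (Assumption: node_count below is part of the InstancePartition
+ # compute_capacity oneof, which would explain why the generated assertions
+ # stop at etag and do not re-check it after the JSON round trip.)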
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = spanner_instance_admin.InstancePartition( + name='name_value', + config='config_value', + display_name='display_name_value', + state=spanner_instance_admin.InstancePartition.State.CREATING, + referencing_databases=['referencing_databases_value'], + referencing_backups=['referencing_backups_value'], + etag='etag_value', + node_count=1070, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = spanner_instance_admin.InstancePartition.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance_partition(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, spanner_instance_admin.InstancePartition) + assert response.name == 'name_value' + assert response.config == 'config_value' + assert response.display_name == 'display_name_value' + assert response.state == spanner_instance_admin.InstancePartition.State.CREATING + assert response.referencing_databases == ['referencing_databases_value'] + assert response.referencing_backups == ['referencing_backups_value'] + assert response.etag == 'etag_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_partition_rest_interceptors(null_interceptor): + transport = transports.InstanceAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), + ) + client = InstanceAdminClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition") as post, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_partition") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = spanner_instance_admin.GetInstancePartitionRequest.pb(spanner_instance_admin.GetInstancePartitionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = spanner_instance_admin.InstancePartition.to_json(spanner_instance_admin.InstancePartition()) + req.return_value.content = return_value + + request = spanner_instance_admin.GetInstancePartitionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = spanner_instance_admin.InstancePartition() + post_with_metadata.return_value = spanner_instance_admin.InstancePartition(), metadata + + client.get_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_create_instance_partition_rest_bad_request(request_type=spanner_instance_admin.CreateInstancePartitionRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_instance_partition(request) + + +@pytest.mark.parametrize("request_type", [ + spanner_instance_admin.CreateInstancePartitionRequest, + dict, +]) +def test_create_instance_partition_rest_call_success(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/instances/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_instance_partition(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operation.Operation)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_instance_partition_rest_interceptors(null_interceptor):
+ transport = transports.InstanceAdminRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+ )
+ client = InstanceAdminClient(transport=transport)
+
+ with mock.patch.object(type(client.transport._session), "request") as req, \
+ mock.patch.object(path_template, "transcode") as transcode, \
+ mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition") as post, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition_with_metadata") as post_with_metadata, \
+ mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_partition") as pre:
+ pre.assert_not_called()
+ post.assert_not_called()
+ post_with_metadata.assert_not_called()
+ pb_message = spanner_instance_admin.CreateInstancePartitionRequest.pb(spanner_instance_admin.CreateInstancePartitionRequest())
+ transcode.return_value = {
+ "method": "post",
+ "uri": "my_uri",
+ "body": pb_message,
+ "query_params": pb_message,
+ }
+
+ req.return_value = mock.Mock()
+ req.return_value.status_code = 200
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+ return_value = json_format.MessageToJson(operations_pb2.Operation())
+ req.return_value.content = return_value
+
+ request = spanner_instance_admin.CreateInstancePartitionRequest()
+ metadata = [
+ ("key", "val"),
+ ("cephalopod", "squid"),
+ ]
+ pre.return_value = request, metadata
+ post.return_value = operations_pb2.Operation()
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+ client.create_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+ pre.assert_called_once()
+ post.assert_called_once()
+ post_with_metadata.assert_called_once()
+
+
+def test_delete_instance_partition_rest_bad_request(request_type=spanner_instance_admin.DeleteInstancePartitionRequest):
+ client = InstanceAdminClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest"
+ )
+ # send a request that will satisfy transcoding
+ request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
+ request = request_type(**request_init)
+
+ # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_instance_partition(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.DeleteInstancePartitionRequest,
+    dict,
+])
+def test_delete_instance_partition_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = ''
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.delete_instance_partition(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_instance_partition_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+    )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_partition") as pre:
+        pre.assert_not_called()
+        pb_message = spanner_instance_admin.DeleteInstancePartitionRequest.pb(spanner_instance_admin.DeleteInstancePartitionRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        request = spanner_instance_admin.DeleteInstancePartitionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+
+
+def test_update_instance_partition_rest_bad_request(request_type=spanner_instance_admin.UpdateInstancePartitionRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.update_instance_partition(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.UpdateInstancePartitionRequest,
+    dict,
+])
+def test_update_instance_partition_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.update_instance_partition(request)
+
+    # Establish that the response is the type that we expect.
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_instance_partition_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+    )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition") as post, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_partition") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.UpdateInstancePartitionRequest.pb(spanner_instance_admin.UpdateInstancePartitionRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.UpdateInstancePartitionRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.update_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_list_instance_partition_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.list_instance_partition_operations(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.ListInstancePartitionOperationsRequest,
+    dict,
+])
+def test_list_instance_partition_operations_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(
+            next_page_token='next_page_token_value',
+            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+
+        # Convert return value to protobuf type
+        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.list_instance_partition_operations(request)
+
+    # Establish that the response is the type that we expect.
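+    # The REST client wraps the raw ListInstancePartitionOperationsResponse
+    # in a pager that lazily fetches further pages via next_page_token.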
+    assert isinstance(response, pagers.ListInstancePartitionOperationsPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_instance_partition_operations_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+    )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations") as post, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partition_operations") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(spanner_instance_admin.ListInstancePartitionOperationsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(spanner_instance_admin.ListInstancePartitionOperationsResponse())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
+        post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(), metadata
+
+        client.list_instance_partition_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_move_instance_rest_bad_request(request_type=spanner_instance_admin.MoveInstanceRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = mock.Mock()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.move_instance(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    spanner_instance_admin.MoveInstanceRequest,
+    dict,
+])
+def test_move_instance_rest_call_success(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'projects/sample1/instances/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = operations_pb2.Operation(name='operations/spam')
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value.content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        response = client.move_instance(request)
+
+    # Establish that the response is the type that we expect.
+    json_return_value = json_format.MessageToJson(return_value)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_move_instance_rest_interceptors(null_interceptor):
+    transport = transports.InstanceAdminRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
+    )
+    client = InstanceAdminClient(transport=transport)
+
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance") as post, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance_with_metadata") as post_with_metadata, \
+         mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_move_instance") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        post_with_metadata.assert_not_called()
+        pb_message = spanner_instance_admin.MoveInstanceRequest.pb(spanner_instance_admin.MoveInstanceRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = mock.Mock()
+        req.return_value.status_code = 200
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        return_value = json_format.MessageToJson(operations_pb2.Operation())
+        req.return_value.content = return_value
+
+        request = spanner_instance_admin.MoveInstanceRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = operations_pb2.Operation()
+        post_with_metadata.return_value = operations_pb2.Operation(), metadata
+
+        client.move_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+        post_with_metadata.assert_called_once()
+
+
+def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.cancel_operation(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    operations_pb2.CancelOperationRequest,
+    dict,
+])
+def test_cancel_operation_rest(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = mock.Mock()
+        response_value.status_code = 200
+        json_return_value = '{}'
+        response_value.content = json_return_value.encode('UTF-8')
+
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        response = client.cancel_operation(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type()
+    request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        json_return_value = ''
+        response_value.json = mock.Mock(return_value={})
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+        client.delete_operation(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    operations_pb2.DeleteOperationRequest,
+    dict,
+])
+def test_delete_operation_rest(request_type):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}
+    request = request_type(**request_init)
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_configs_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_configs), + '__call__') as call: + client.list_instance_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstanceConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_config_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_config), + '__call__') as call: + client.get_instance_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstanceConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_instance_config_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance_config),
+            '__call__') as call:
+        client.create_instance_config(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.CreateInstanceConfigRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_instance_config_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance_config),
+            '__call__') as call:
+        client.update_instance_config(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.UpdateInstanceConfigRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_instance_config_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance_config),
+            '__call__') as call:
+        client.delete_instance_config(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.DeleteInstanceConfigRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_instance_config_operations_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_config_operations),
+            '__call__') as call:
+        client.list_instance_config_operations(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_instances_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instances),
+            '__call__') as call:
+        client.list_instances(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.ListInstancesRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_instance_partitions_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_instance_partitions),
+            '__call__') as call:
+        client.list_instance_partitions(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.ListInstancePartitionsRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_instance_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_instance),
+            '__call__') as call:
+        client.get_instance(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.GetInstanceRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_instance_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_instance),
+            '__call__') as call:
+        client.create_instance(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.CreateInstanceRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_instance_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_instance),
+            '__call__') as call:
+        client.update_instance(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.UpdateInstanceRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_instance_empty_call_rest():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_instance),
+            '__call__') as call:
+        client.delete_instance(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = spanner_instance_admin.DeleteInstanceRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_set_iam_policy_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_partition), + '__call__') as call: + client.get_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.GetInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance_partition), + '__call__') as call: + client.create_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.CreateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance_partition), + '__call__') as call: + client.delete_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_partition_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance_partition), + '__call__') as call: + client.update_instance_partition(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instance_partition_operations_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instance_partition_operations), + '__call__') as call: + client.list_instance_partition_operations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_move_instance_empty_call_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.move_instance), + '__call__') as call: + client.move_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = spanner_instance_admin.MoveInstanceRequest() + + assert args[0] == request_msg + + +def test_instance_admin_rest_lro_client(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.InstanceAdminGrpcTransport, + ) + +def test_instance_admin_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_instance_admin_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.InstanceAdminTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_instance_configs', + 'get_instance_config', + 'create_instance_config', + 'update_instance_config', + 'delete_instance_config', + 'list_instance_config_operations', + 'list_instances', + 'list_instance_partitions', + 'get_instance', + 'create_instance', + 'update_instance', + 'delete_instance', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_instance_partition', + 'create_instance_partition', + 'delete_instance_partition', + 'update_instance_partition', + 'list_instance_partition_operations', + 'move_instance', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instance_admin_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceAdminTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id="octopus", + ) + + +def test_instance_admin_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
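+    # (ADC here means Application Default Credentials, resolved through
+    # google.auth.default().)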
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceAdminTransport() + adc.assert_called_once() + + +def test_instance_admin_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceAdminClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/spanner.admin', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, + ], +) +def test_instance_admin_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstanceAdminGrpcTransport, + transports.InstanceAdminGrpcAsyncIOTransport, + transports.InstanceAdminRestTransport, + ], +) +def test_instance_admin_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.InstanceAdminGrpcTransport, grpc_helpers), + (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_instance_admin_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
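+    # create_channel is patched alongside google.auth.default so the
+    # assertions below can inspect the options the transport passes when
+    # opening the gRPC channel.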
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "spanner.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/spanner.admin',
+),
+            scopes=["1", "2"],
+            default_host="spanner.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport])
+def test_instance_admin_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_instance_admin_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.InstanceAdminRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_instance_admin_host_no_port(transport_name):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'spanner.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://spanner.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_instance_admin_host_with_port(transport_name):
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'spanner.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://spanner.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_instance_admin_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = InstanceAdminClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = InstanceAdminClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.list_instance_configs._session
+    session2 = client2.transport.list_instance_configs._session
+    assert session1 != session2
+    session1 = client1.transport.get_instance_config._session
+    session2 = client2.transport.get_instance_config._session
+    assert session1 != session2
+    session1 = client1.transport.create_instance_config._session
+    session2 = client2.transport.create_instance_config._session
+    assert session1 != session2
+    session1 = client1.transport.update_instance_config._session
+    session2 = client2.transport.update_instance_config._session
+    assert session1 != session2
+    session1 = client1.transport.delete_instance_config._session
+    session2 = client2.transport.delete_instance_config._session
+    assert session1 != session2
+    session1 = client1.transport.list_instance_config_operations._session
+    session2 = client2.transport.list_instance_config_operations._session
+    assert session1 != session2
+    session1 = client1.transport.list_instances._session
+    session2 = client2.transport.list_instances._session
+    assert session1 != session2
+    session1 = client1.transport.list_instance_partitions._session
+    session2 = client2.transport.list_instance_partitions._session
+    assert session1 != session2
+    session1 = client1.transport.get_instance._session
+    session2 = client2.transport.get_instance._session
+    assert session1 != session2
+    session1 = client1.transport.create_instance._session
+    session2 = client2.transport.create_instance._session
+    assert session1 != session2
+    session1 = client1.transport.update_instance._session
+    session2 = client2.transport.update_instance._session
+    assert session1 != session2
+    session1 = client1.transport.delete_instance._session
+    session2 = client2.transport.delete_instance._session
+    assert session1 != session2
+    session1 = client1.transport.set_iam_policy._session
+    session2 = client2.transport.set_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.get_iam_policy._session
+    session2 = client2.transport.get_iam_policy._session
+    assert session1 != session2
+    session1 = client1.transport.test_iam_permissions._session
+    session2 = client2.transport.test_iam_permissions._session
+    assert session1 != session2
+    session1 = client1.transport.get_instance_partition._session
+    session2 = client2.transport.get_instance_partition._session
+    assert session1 != session2
+    session1 = client1.transport.create_instance_partition._session
+    session2 = client2.transport.create_instance_partition._session
+    assert session1 != session2
+    session1 = client1.transport.delete_instance_partition._session
+    session2 = client2.transport.delete_instance_partition._session
+    assert session1 != session2
+    session1 = client1.transport.update_instance_partition._session
+    session2 = client2.transport.update_instance_partition._session
+    assert session1 != session2
+    session1 = client1.transport.list_instance_partition_operations._session
+    session2 = client2.transport.list_instance_partition_operations._session
+    assert session1 != session2
+    session1 = client1.transport.move_instance._session
+    session2 = client2.transport.move_instance._session
+    assert session1 != session2
+def test_instance_admin_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.InstanceAdminGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_instance_admin_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.InstanceAdminGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport])
+def test_instance_admin_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport])
+def test_instance_admin_transport_channel_mtls_with_adc(
+    transport_class
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_instance_admin_grpc_lro_client():
+    client = InstanceAdminClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_instance_admin_grpc_lro_async_client():
+    client = InstanceAdminAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc_asyncio',
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_instance_path():
+    project = "squid"
+    instance = "clam"
+    expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, )
+    actual = InstanceAdminClient.instance_path(project, instance)
+    assert expected == actual
+
+
+def test_parse_instance_path():
+    expected = {
+        "project": "whelk",
+        "instance": "octopus",
+    }
+    path = InstanceAdminClient.instance_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = InstanceAdminClient.parse_instance_path(path)
+    assert expected == actual
+
+def test_instance_config_path():
+    project = "oyster"
+    instance_config = "nudibranch"
+    expected = "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, )
+    actual = InstanceAdminClient.instance_config_path(project, instance_config)
+    assert expected == actual
+
+
+def test_parse_instance_config_path():
+    expected = {
+        "project": "cuttlefish",
+        "instance_config": "mussel",
+    }
+    path = InstanceAdminClient.instance_config_path(**expected)
+
+    # Check that the path construction is reversible.
+ actual = InstanceAdminClient.parse_instance_config_path(path) + assert expected == actual + +def test_instance_partition_path(): + project = "winkle" + instance = "nautilus" + instance_partition = "scallop" + expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) + actual = InstanceAdminClient.instance_partition_path(project, instance, instance_partition) + assert expected == actual + + +def test_parse_instance_partition_path(): + expected = { + "project": "abalone", + "instance": "squid", + "instance_partition": "clam", + } + path = InstanceAdminClient.instance_partition_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_instance_partition_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = InstanceAdminClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = InstanceAdminClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = InstanceAdminClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = InstanceAdminClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = InstanceAdminClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = InstanceAdminClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = InstanceAdminClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = InstanceAdminClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceAdminClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = InstanceAdminClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = InstanceAdminClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceAdminClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: + transport_class = InstanceAdminClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
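+    # (Each mock_calls entry unpacks as a (name, args, kwargs) triple; the routing header travels in kwargs["metadata"].)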
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
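+    # (Patching __call__ on the multicallable's type intercepts the stub invocation itself.)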
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
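+        # (FakeUnaryUnaryCall makes the mocked method awaitable, yielding the wrapped value.)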
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
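+    # (The operations mixin returns the raw proto response rather than a pager.)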
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = InstanceAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = InstanceAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (InstanceAdminClient, transports.InstanceAdminGrpcTransport), + (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From e812aec2c6d978562a56670185873f0df0f7d7af Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 28 Oct 2025 23:03:27 +0000 Subject: [PATCH 4/4] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../types/spanner_instance_admin.py | 11 + owl-bot-staging/spanner/v1/.coveragerc | 13 - owl-bot-staging/spanner/v1/.flake8 | 34 - owl-bot-staging/spanner/v1/LICENSE | 202 - owl-bot-staging/spanner/v1/MANIFEST.in | 20 - owl-bot-staging/spanner/v1/README.rst | 143 - .../spanner/v1/docs/_static/custom.css | 20 - .../spanner/v1/docs/_templates/layout.html | 50 - owl-bot-staging/spanner/v1/docs/conf.py | 385 - owl-bot-staging/spanner/v1/docs/index.rst | 10 - .../spanner/v1/docs/multiprocessing.rst | 7 - 
.../spanner/v1/docs/spanner_v1/services_.rst | 6 - .../spanner/v1/docs/spanner_v1/spanner.rst | 10 - .../spanner/v1/docs/spanner_v1/types_.rst | 6 - .../v1/google/cloud/spanner/__init__.py | 113 - .../v1/google/cloud/spanner/gapic_version.py | 16 - .../spanner/v1/google/cloud/spanner/py.typed | 2 - .../v1/google/cloud/spanner_v1/__init__.py | 114 - .../cloud/spanner_v1/gapic_metadata.json | 268 - .../google/cloud/spanner_v1/gapic_version.py | 16 - .../v1/google/cloud/spanner_v1/py.typed | 2 - .../cloud/spanner_v1/services/__init__.py | 15 - .../spanner_v1/services/spanner/__init__.py | 22 - .../services/spanner/async_client.py | 2125 -- .../spanner_v1/services/spanner/client.py | 2486 -- .../spanner_v1/services/spanner/pagers.py | 166 - .../services/spanner/transports/README.rst | 9 - .../services/spanner/transports/__init__.py | 38 - .../services/spanner/transports/base.py | 505 - .../services/spanner/transports/grpc.py | 899 - .../spanner/transports/grpc_asyncio.py | 1125 - .../services/spanner/transports/rest.py | 2898 -- .../services/spanner/transports/rest_base.py | 817 - .../google/cloud/spanner_v1/types/__init__.py | 122 - .../cloud/spanner_v1/types/change_stream.py | 683 - .../cloud/spanner_v1/types/commit_response.py | 104 - .../v1/google/cloud/spanner_v1/types/keys.py | 248 - .../google/cloud/spanner_v1/types/mutation.py | 201 - .../cloud/spanner_v1/types/query_plan.py | 219 - .../cloud/spanner_v1/types/result_set.py | 379 - .../google/cloud/spanner_v1/types/spanner.py | 1782 -- .../cloud/spanner_v1/types/transaction.py | 491 - .../v1/google/cloud/spanner_v1/types/type.py | 288 - owl-bot-staging/spanner/v1/mypy.ini | 3 - owl-bot-staging/spanner/v1/noxfile.py | 601 - .../snippet_metadata_google.spanner.v1.json | 2579 -- ...ted_spanner_batch_create_sessions_async.py | 53 - ...ated_spanner_batch_create_sessions_sync.py | 53 - ..._v1_generated_spanner_batch_write_async.py | 57 - ...r_v1_generated_spanner_batch_write_sync.py | 57 - ...nerated_spanner_begin_transaction_async.py | 52 - ...enerated_spanner_begin_transaction_sync.py | 52 - ...anner_v1_generated_spanner_commit_async.py | 53 - ...panner_v1_generated_spanner_commit_sync.py | 53 - ..._generated_spanner_create_session_async.py | 52 - ...1_generated_spanner_create_session_sync.py | 52 - ..._generated_spanner_delete_session_async.py | 50 - ...1_generated_spanner_delete_session_sync.py | 50 - ...nerated_spanner_execute_batch_dml_async.py | 57 - ...enerated_spanner_execute_batch_dml_sync.py | 57 - ..._v1_generated_spanner_execute_sql_async.py | 53 - ...r_v1_generated_spanner_execute_sql_sync.py | 53 - ...ted_spanner_execute_streaming_sql_async.py | 54 - ...ated_spanner_execute_streaming_sql_sync.py | 54 - ..._v1_generated_spanner_get_session_async.py | 52 - ...r_v1_generated_spanner_get_session_sync.py | 52 - ...1_generated_spanner_list_sessions_async.py | 53 - ...v1_generated_spanner_list_sessions_sync.py | 53 - ...generated_spanner_partition_query_async.py | 53 - ..._generated_spanner_partition_query_sync.py | 53 - ..._generated_spanner_partition_read_async.py | 53 - ...1_generated_spanner_partition_read_sync.py | 53 - ...spanner_v1_generated_spanner_read_async.py | 54 - .../spanner_v1_generated_spanner_read_sync.py | 54 - ...ner_v1_generated_spanner_rollback_async.py | 51 - ...nner_v1_generated_spanner_rollback_sync.py | 51 - ..._generated_spanner_streaming_read_async.py | 55 - ...1_generated_spanner_streaming_read_sync.py | 55 - .../v1/scripts/fixup_spanner_v1_keywords.py | 191 - owl-bot-staging/spanner/v1/setup.py | 101 - 
.../spanner/v1/testing/constraints-3.10.txt | 8 - .../spanner/v1/testing/constraints-3.11.txt | 8 - .../spanner/v1/testing/constraints-3.12.txt | 8 - .../spanner/v1/testing/constraints-3.13.txt | 12 - .../spanner/v1/testing/constraints-3.14.txt | 12 - .../spanner/v1/testing/constraints-3.7.txt | 12 - .../spanner/v1/testing/constraints-3.8.txt | 8 - .../spanner/v1/testing/constraints-3.9.txt | 8 - owl-bot-staging/spanner/v1/tests/__init__.py | 16 - .../spanner/v1/tests/unit/__init__.py | 16 - .../spanner/v1/tests/unit/gapic/__init__.py | 16 - .../tests/unit/gapic/spanner_v1/__init__.py | 16 - .../unit/gapic/spanner_v1/test_spanner.py | 11446 -------- .../spanner_admin_database/v1/.coveragerc | 13 - .../spanner_admin_database/v1/.flake8 | 34 - .../spanner_admin_database/v1/LICENSE | 202 - .../spanner_admin_database/v1/MANIFEST.in | 20 - .../spanner_admin_database/v1/README.rst | 143 - .../v1/docs/_static/custom.css | 20 - .../v1/docs/_templates/layout.html | 50 - .../spanner_admin_database/v1/docs/conf.py | 385 - .../spanner_admin_database/v1/docs/index.rst | 10 - .../v1/docs/multiprocessing.rst | 7 - .../database_admin.rst | 10 - .../spanner_admin_database_v1/services_.rst | 6 - .../docs/spanner_admin_database_v1/types_.rst | 6 - .../cloud/spanner_admin_database/__init__.py | 149 - .../spanner_admin_database/gapic_version.py | 16 - .../cloud/spanner_admin_database/py.typed | 2 - .../spanner_admin_database_v1/__init__.py | 150 - .../gapic_metadata.json | 433 - .../gapic_version.py | 16 - .../cloud/spanner_admin_database_v1/py.typed | 2 - .../services/__init__.py | 15 - .../services/database_admin/__init__.py | 22 - .../services/database_admin/async_client.py | 3968 --- .../services/database_admin/client.py | 4387 --- .../services/database_admin/pagers.py | 864 - .../database_admin/transports/README.rst | 9 - .../database_admin/transports/__init__.py | 38 - .../database_admin/transports/base.py | 795 - .../database_admin/transports/grpc.py | 1285 - .../database_admin/transports/grpc_asyncio.py | 1656 -- .../database_admin/transports/rest.py | 5336 ---- .../database_admin/transports/rest_base.py | 1378 - .../types/__init__.py | 148 - .../spanner_admin_database_v1/types/backup.py | 1097 - .../types/backup_schedule.py | 369 - .../spanner_admin_database_v1/types/common.py | 179 - .../types/spanner_database_admin.py | 1348 - .../spanner_admin_database/v1/mypy.ini | 3 - .../spanner_admin_database/v1/noxfile.py | 601 - ...data_google.spanner.admin.database.v1.json | 4480 ---- ...d_database_admin_add_split_points_async.py | 52 - ...ed_database_admin_add_split_points_sync.py | 52 - ...erated_database_admin_copy_backup_async.py | 58 - ...nerated_database_admin_copy_backup_sync.py | 58 - ...ated_database_admin_create_backup_async.py | 57 - ...base_admin_create_backup_schedule_async.py | 53 - ...abase_admin_create_backup_schedule_sync.py | 53 - ...rated_database_admin_create_backup_sync.py | 57 - ...ed_database_admin_create_database_async.py | 57 - ...ted_database_admin_create_database_sync.py | 57 - ...ated_database_admin_delete_backup_async.py | 50 - ...base_admin_delete_backup_schedule_async.py | 50 - ...abase_admin_delete_backup_schedule_sync.py | 50 - ...rated_database_admin_delete_backup_sync.py | 50 - ...ated_database_admin_drop_database_async.py | 50 - ...rated_database_admin_drop_database_sync.py | 50 - ...nerated_database_admin_get_backup_async.py | 52 - ...atabase_admin_get_backup_schedule_async.py | 52 - ...database_admin_get_backup_schedule_sync.py | 52 - 
...enerated_database_admin_get_backup_sync.py | 52 - ...rated_database_admin_get_database_async.py | 52 - ...d_database_admin_get_database_ddl_async.py | 52 - ...ed_database_admin_get_database_ddl_sync.py | 52 - ...erated_database_admin_get_database_sync.py | 52 - ...ted_database_admin_get_iam_policy_async.py | 53 - ...ated_database_admin_get_iam_policy_sync.py | 53 - ...n_internal_update_graph_operation_async.py | 54 - ...in_internal_update_graph_operation_sync.py | 54 - ...base_admin_list_backup_operations_async.py | 53 - ...abase_admin_list_backup_operations_sync.py | 53 - ...abase_admin_list_backup_schedules_async.py | 53 - ...tabase_admin_list_backup_schedules_sync.py | 53 - ...rated_database_admin_list_backups_async.py | 53 - ...erated_database_admin_list_backups_sync.py | 53 - ...se_admin_list_database_operations_async.py | 53 - ...ase_admin_list_database_operations_sync.py | 53 - ...atabase_admin_list_database_roles_async.py | 53 - ...database_admin_list_database_roles_sync.py | 53 - ...ted_database_admin_list_databases_async.py | 53 - ...ated_database_admin_list_databases_sync.py | 53 - ...d_database_admin_restore_database_async.py | 58 - ...ed_database_admin_restore_database_sync.py | 58 - ...ted_database_admin_set_iam_policy_async.py | 53 - ...ated_database_admin_set_iam_policy_sync.py | 53 - ...tabase_admin_test_iam_permissions_async.py | 54 - ...atabase_admin_test_iam_permissions_sync.py | 54 - ...ated_database_admin_update_backup_async.py | 51 - ...base_admin_update_backup_schedule_async.py | 51 - ...abase_admin_update_backup_schedule_sync.py | 51 - ...rated_database_admin_update_backup_sync.py | 51 - ...ed_database_admin_update_database_async.py | 59 - ...atabase_admin_update_database_ddl_async.py | 57 - ...database_admin_update_database_ddl_sync.py | 57 - ...ted_database_admin_update_database_sync.py | 59 - ...ixup_spanner_admin_database_v1_keywords.py | 202 - .../spanner_admin_database/v1/setup.py | 102 - .../v1/testing/constraints-3.10.txt | 9 - .../v1/testing/constraints-3.11.txt | 9 - .../v1/testing/constraints-3.12.txt | 9 - .../v1/testing/constraints-3.13.txt | 13 - .../v1/testing/constraints-3.14.txt | 13 - .../v1/testing/constraints-3.7.txt | 13 - .../v1/testing/constraints-3.8.txt | 9 - .../v1/testing/constraints-3.9.txt | 9 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../spanner_admin_database_v1/__init__.py | 16 - .../test_database_admin.py | 22226 ---------------- .../spanner_admin_instance/v1/.coveragerc | 13 - .../spanner_admin_instance/v1/.flake8 | 34 - .../spanner_admin_instance/v1/LICENSE | 202 - .../spanner_admin_instance/v1/MANIFEST.in | 20 - .../spanner_admin_instance/v1/README.rst | 143 - .../v1/docs/_static/custom.css | 20 - .../v1/docs/_templates/layout.html | 50 - .../spanner_admin_instance/v1/docs/conf.py | 385 - .../spanner_admin_instance/v1/docs/index.rst | 10 - .../v1/docs/multiprocessing.rst | 7 - .../instance_admin.rst | 10 - .../spanner_admin_instance_v1/services_.rst | 6 - .../docs/spanner_admin_instance_v1/types_.rst | 6 - .../cloud/spanner_admin_instance/__init__.py | 109 - .../spanner_admin_instance/gapic_version.py | 16 - .../cloud/spanner_admin_instance/py.typed | 2 - .../spanner_admin_instance_v1/__init__.py | 110 - .../gapic_metadata.json | 343 - .../gapic_version.py | 16 - .../cloud/spanner_admin_instance_v1/py.typed | 2 - .../services/__init__.py | 15 - .../services/instance_admin/__init__.py | 22 - .../services/instance_admin/async_client.py | 3468 --- 
.../services/instance_admin/client.py | 3849 --- .../services/instance_admin/pagers.py | 723 - .../instance_admin/transports/README.rst | 9 - .../instance_admin/transports/__init__.py | 38 - .../instance_admin/transports/base.py | 567 - .../instance_admin/transports/grpc.py | 1359 - .../instance_admin/transports/grpc_asyncio.py | 1560 -- .../instance_admin/transports/rest.py | 4462 ---- .../instance_admin/transports/rest_base.py | 1152 - .../types/__init__.py | 104 - .../spanner_admin_instance_v1/types/common.py | 99 - .../types/spanner_instance_admin.py | 2377 -- .../spanner_admin_instance/v1/mypy.ini | 3 - .../spanner_admin_instance/v1/noxfile.py | 601 - ...data_google.spanner.admin.instance.v1.json | 3450 --- ...ed_instance_admin_create_instance_async.py | 63 - ...ance_admin_create_instance_config_async.py | 57 - ...tance_admin_create_instance_config_sync.py | 57 - ...e_admin_create_instance_partition_async.py | 64 - ...ce_admin_create_instance_partition_sync.py | 64 - ...ted_instance_admin_create_instance_sync.py | 63 - ...ed_instance_admin_delete_instance_async.py | 50 - ...ance_admin_delete_instance_config_async.py | 50 - ...tance_admin_delete_instance_config_sync.py | 50 - ...e_admin_delete_instance_partition_async.py | 50 - ...ce_admin_delete_instance_partition_sync.py | 50 - ...ted_instance_admin_delete_instance_sync.py | 50 - ...ted_instance_admin_get_iam_policy_async.py | 53 - ...ated_instance_admin_get_iam_policy_sync.py | 53 - ...rated_instance_admin_get_instance_async.py | 52 - ...nstance_admin_get_instance_config_async.py | 52 - ...instance_admin_get_instance_config_sync.py | 52 - ...ance_admin_get_instance_partition_async.py | 52 - ...tance_admin_get_instance_partition_sync.py | 52 - ...erated_instance_admin_get_instance_sync.py | 52 - ...n_list_instance_config_operations_async.py | 53 - ...in_list_instance_config_operations_sync.py | 53 - ...tance_admin_list_instance_configs_async.py | 53 - ...stance_admin_list_instance_configs_sync.py | 53 - ...ist_instance_partition_operations_async.py | 53 - ...list_instance_partition_operations_sync.py | 53 - ...ce_admin_list_instance_partitions_async.py | 53 - ...nce_admin_list_instance_partitions_sync.py | 53 - ...ted_instance_admin_list_instances_async.py | 53 - ...ated_instance_admin_list_instances_sync.py | 53 - ...ated_instance_admin_move_instance_async.py | 57 - ...rated_instance_admin_move_instance_sync.py | 57 - ...ted_instance_admin_set_iam_policy_async.py | 53 - ...ated_instance_admin_set_iam_policy_sync.py | 53 - ...stance_admin_test_iam_permissions_async.py | 54 - ...nstance_admin_test_iam_permissions_sync.py | 54 - ...ed_instance_admin_update_instance_async.py | 61 - ...ance_admin_update_instance_config_async.py | 55 - ...tance_admin_update_instance_config_sync.py | 55 - ...e_admin_update_instance_partition_async.py | 62 - ...ce_admin_update_instance_partition_sync.py | 62 - ...ted_instance_admin_update_instance_sync.py | 61 - ...ixup_spanner_admin_instance_v1_keywords.py | 196 - .../spanner_admin_instance/v1/setup.py | 102 - .../v1/testing/constraints-3.10.txt | 9 - .../v1/testing/constraints-3.11.txt | 9 - .../v1/testing/constraints-3.12.txt | 9 - .../v1/testing/constraints-3.13.txt | 13 - .../v1/testing/constraints-3.14.txt | 13 - .../v1/testing/constraints-3.7.txt | 13 - .../v1/testing/constraints-3.8.txt | 9 - .../v1/testing/constraints-3.9.txt | 9 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../spanner_admin_instance_v1/__init__.py | 16 - 
.../test_instance_admin.py | 17636 ------------ 297 files changed, 11 insertions(+), 134784 deletions(-) delete mode 100644 owl-bot-staging/spanner/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner/v1/.flake8 delete mode 100644 owl-bot-staging/spanner/v1/LICENSE delete mode 100644 owl-bot-staging/spanner/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner/v1/README.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/spanner/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/spanner/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst delete mode 100644 owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py delete mode 100644 
owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py delete mode 100644 owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py delete mode 100644 owl-bot-staging/spanner/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py delete mode 100644 
owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py delete mode 100644 owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py delete mode 100644 owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py delete mode 100644 owl-bot-staging/spanner/v1/setup.py delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.14.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/spanner/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/spanner/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py delete mode 100644 owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner_admin_database/v1/.flake8 delete mode 100644 owl-bot-staging/spanner_admin_database/v1/LICENSE delete mode 100644 owl-bot-staging/spanner_admin_database/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner_admin_database/v1/README.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner_admin_database/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py delete mode 100644 
owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/setup.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/.coveragerc delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/.flake8 delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/LICENSE delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/README.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/conf.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/index.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/mypy.ini delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/noxfile.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py delete mode 100644 
owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/setup.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py delete mode 100644 owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py diff --git 
a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
index 1e1509d1c4..be1822b33c 100644
--- a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
+++ b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
@@ -1765,6 +1765,12 @@ class InstancePartition(proto.Message):
             that are not yet in the ``READY`` state.
 
             This field is a member of `oneof`_ ``compute_capacity``.
+        autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig):
+            Optional. The autoscaling configuration. Autoscaling is
+            enabled if this field is set. When autoscaling is enabled,
+            fields in compute_capacity are treated as OUTPUT_ONLY fields
+            and reflect the current compute capacity allocated to the
+            instance partition.
         state (google.cloud.spanner_admin_instance_v1.types.InstancePartition.State):
             Output only. The current instance partition
             state.
@@ -1848,6 +1854,11 @@ class State(proto.Enum):
         number=6,
         oneof="compute_capacity",
     )
+    autoscaling_config: "AutoscalingConfig" = proto.Field(
+        proto.MESSAGE,
+        number=13,
+        message="AutoscalingConfig",
+    )
     state: State = proto.Field(
         proto.ENUM,
         number=7,
diff --git a/owl-bot-staging/spanner/v1/.coveragerc b/owl-bot-staging/spanner/v1/.coveragerc
deleted file mode 100644
index 677a992799..0000000000
--- a/owl-bot-staging/spanner/v1/.coveragerc
+++ /dev/null
@@ -1,13 +0,0 @@
-[run]
-branch = True
-
-[report]
-show_missing = True
-omit =
-    google/cloud/spanner/__init__.py
-    google/cloud/spanner/gapic_version.py
-exclude_lines =
-    # Re-enable the standard pragma
-    pragma: NO COVER
-    # Ignore debug-only repr
-    def __repr__
diff --git a/owl-bot-staging/spanner/v1/.flake8 b/owl-bot-staging/spanner/v1/.flake8
deleted file mode 100644
index 90316de214..0000000000
--- a/owl-bot-staging/spanner/v1/.flake8
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-[flake8]
-# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
-# Resolve flake8 lint issues
-ignore = E203, E231, E266, E501, W503
-exclude =
-    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
-    # Ensure that generated code passes flake8 lint
-    **/gapic/**
-    **/services/**
-    **/types/**
-    # Exclude Protobuf gencode
-    *_pb2.py
-
-    # Standard linting exemptions.
-    **/.nox/**
-    __pycache__,
-    .git,
-    *.pyc,
-    conf.py
diff --git a/owl-bot-staging/spanner/v1/LICENSE b/owl-bot-staging/spanner/v1/LICENSE
deleted file mode 100644
index d645695673..0000000000
--- a/owl-bot-staging/spanner/v1/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
- - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document.
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
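The hunk at the top of this patch body (in spanner_instance_admin.py, just before these deleted owl-bot-staging files) is the substantive change: ``InstancePartition`` gains an optional ``autoscaling_config`` field at proto field 13, and setting it enables autoscaling, after which the ``compute_capacity`` oneof becomes output-only. A minimal sketch of how a caller might set the new field follows; the ``AutoscalingLimits``/``AutoscalingTargets`` sub-messages are assumptions modeled on the existing instance-level ``AutoscalingConfig``, and all resource names are placeholders, none of which this diff shows.

.. code-block:: python

    # Sketch only: assumes AutoscalingConfig exposes the same
    # autoscaling_limits / autoscaling_targets sub-messages as the
    # instance-level message; resource names are placeholders.
    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    partition = spanner_admin_instance_v1.InstancePartition(
        display_name="reporting-partition",
        config="projects/my-project/instanceConfigs/regional-us-central1",
        # Once autoscaling_config is set, node_count / processing_units
        # become OUTPUT_ONLY and reflect the current allocation.
        autoscaling_config=spanner_admin_instance_v1.AutoscalingConfig(
            autoscaling_limits=spanner_admin_instance_v1.AutoscalingConfig.AutoscalingLimits(
                min_nodes=1,
                max_nodes=5,
            ),
            autoscaling_targets=spanner_admin_instance_v1.AutoscalingConfig.AutoscalingTargets(
                high_priority_cpu_utilization_percent=65,
                storage_utilization_percent=95,
            ),
        ),
    )

    operation = client.create_instance_partition(
        parent="projects/my-project/instances/my-instance",
        instance_partition_id="reporting-partition",
        instance_partition=partition,
    )
    result = operation.result()  # blocks until the long-running operation completes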
diff --git a/owl-bot-staging/spanner/v1/MANIFEST.in b/owl-bot-staging/spanner/v1/MANIFEST.in
deleted file mode 100644
index dae249ec89..0000000000
--- a/owl-bot-staging/spanner/v1/MANIFEST.in
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-include README.rst LICENSE
-recursive-include google *.py *.pyi *.json *.proto py.typed
-recursive-include tests *
-global-exclude *.py[co]
-global-exclude __pycache__
diff --git a/owl-bot-staging/spanner/v1/README.rst b/owl-bot-staging/spanner/v1/README.rst
deleted file mode 100644
index 502ebce1c4..0000000000
--- a/owl-bot-staging/spanner/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Spanner API
-=================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Spanner API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
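Applied to this package, the scope for the environment-based mechanism described in that README would be ``google.cloud.spanner_v1``. A small sketch of setting the variable in-process; per the "Logging details" notes, this is assumed to take effect only if it happens before the first client is constructed:

.. code-block:: python

    import os

    # Assumed in-process equivalent of:
    #   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.spanner_v1
    # The scope is consulted when the first client is created, so it must
    # be set before any client is instantiated.
    os.environ["GOOGLE_SDK_PYTHON_LOGGING_SCOPE"] = "google.cloud.spanner_v1"

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()  # DEBUG+ events now use the default handler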
diff --git a/owl-bot-staging/spanner/v1/docs/_static/custom.css b/owl-bot-staging/spanner/v1/docs/_static/custom.css deleted file mode 100644 index b0a295464b..0000000000 --- a/owl-bot-staging/spanner/v1/docs/_static/custom.css +++ /dev/null @@ -1,20 +0,0 @@ -div#python2-eol { - border-color: red; - border-width: medium; -} - -/* Ensure minimum width for 'Parameters' / 'Returns' column */ -dl.field-list > dt { - min-width: 100px -} - -/* Insert space between methods for readability */ -dl.method { - padding-top: 10px; - padding-bottom: 10px -} - -/* Insert empty space between classes */ -dl.class { - padding-bottom: 50px -} diff --git a/owl-bot-staging/spanner/v1/docs/_templates/layout.html b/owl-bot-staging/spanner/v1/docs/_templates/layout.html deleted file mode 100644 index 95e9c77fcf..0000000000 --- a/owl-bot-staging/spanner/v1/docs/_templates/layout.html +++ /dev/null @@ -1,50 +0,0 @@ - -{% extends "!layout.html" %} -{%- block content %} -{%- if theme_fixed_sidebar|lower == 'true' %} -
-  <div class="document">
-    {{ sidebar() }}
-    {%- block document %}
-      <div class="documentwrapper">
-      {%- if render_sidebar %}
-        <div class="bodywrapper">
-      {%- endif %}
-
-          {%- block relbar_top %}
-            {%- if theme_show_relbar_top|tobool %}
-              <div class="related top">
-                &nbsp;
-              </div>
-            {%- endif %}
-          {% endblock %}
-
-          <div class="body" role="main">
-            <div id="python2-eol" class="admonition admonition-python2-eol">
-              As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
-              Library versions released prior to that date will continue to be available. For more information please
-              visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
-            </div>
-            {% block body %} {% endblock %}
-          </div>
-
-          {%- block relbar_bottom %}
-            {%- if theme_show_relbar_bottom|tobool %}
-              <div class="related bottom">
-                &nbsp;
-              </div>
-            {%- endif %}
-          {% endblock %}
-
-      {%- if render_sidebar %}
-        </div>
-      {%- endif %}
-      </div>
-    {%- endblock %}
-  </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/spanner/v1/docs/conf.py b/owl-bot-staging/spanner/v1/docs/conf.py deleted file mode 100644 index 010a6b6cda..0000000000 --- a/owl-bot-staging/spanner/v1/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-spanner documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-spanner" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. 
-# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-spanner", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-spanner-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-spanner.tex", - u"google-cloud-spanner Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False

-# Documents to append as an appendix to all manuals.
-# latex_appendices = []

-# If false, no module index is generated.
-# latex_domain_indices = True


-# -- Options for manual page output ---------------------------------------

-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (
-        root_doc,
-        "google-cloud-spanner",
-        "google-cloud-spanner Documentation",
-        [author],
-        1,
-    )
-]

-# If true, show URL addresses after external links.
-# man_show_urls = False


-# -- Options for Texinfo output -------------------------------------------

-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
-    (
-        root_doc,
-        "google-cloud-spanner",
-        "google-cloud-spanner Documentation",
-        author,
-        "google-cloud-spanner",
-        "google-cloud-spanner Library",
-        "APIs",
-    )
-]

-# Documents to append as an appendix to all manuals.
-# texinfo_appendices = []

-# If false, no module index is generated.
-# texinfo_domain_indices = True

-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'

-# If true, do not generate a @detailmenu in the "Top" node's menu.
-# texinfo_no_detailmenu = False


-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {
-    "python": ("https://python.readthedocs.org/en/latest/", None),
-    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
-    "google.api_core": (
-        "https://googleapis.dev/python/google-api-core/latest/",
-        None,
-    ),
-    "grpc": ("https://grpc.github.io/grpc/python/", None),
-    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
-    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
-}


-# Napoleon settings
-napoleon_google_docstring = True
-napoleon_numpy_docstring = True
-napoleon_include_private_with_doc = False
-napoleon_include_special_with_doc = True
-napoleon_use_admonition_for_examples = False
-napoleon_use_admonition_for_notes = False
-napoleon_use_admonition_for_references = False
-napoleon_use_ivar = False
-napoleon_use_param = True
-napoleon_use_rtype = True
diff --git a/owl-bot-staging/spanner/v1/docs/index.rst b/owl-bot-staging/spanner/v1/docs/index.rst
deleted file mode 100644
index 1162c85acc..0000000000
--- a/owl-bot-staging/spanner/v1/docs/index.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-.. include:: multiprocessing.rst
-
-
-API Reference
--------------
-.. toctree::
-    :maxdepth: 2
-
-    spanner_v1/services_
-    spanner_v1/types_
diff --git a/owl-bot-staging/spanner/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner/v1/docs/multiprocessing.rst
deleted file mode 100644
index 536d17b2ea..0000000000
--- a/owl-bot-staging/spanner/v1/docs/multiprocessing.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-.. note::
-
-   Because this client uses the :mod:`grpc` library, it is safe to
-   share instances across threads. In multiprocessing scenarios, the best
-   practice is to create client instances *after* the invocation of
-   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
-   :class:`multiprocessing.Process`.
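A short sketch of the fork-safety pattern that deleted note prescribes, with placeholder resource names; ``get_session`` stands in here for any RPC:

.. code-block:: python

    # Create gRPC-backed clients *after* the fork, one per worker process,
    # rather than sharing a client created in the parent process.
    import multiprocessing

    from google.cloud import spanner_v1


    def fetch_session(name: str) -> str:
        client = spanner_v1.SpannerClient()  # constructed post-fork
        return client.get_session(name=name).name


    if __name__ == "__main__":
        names = [
            "projects/p/instances/i/databases/d/sessions/s1",
            "projects/p/instances/i/databases/d/sessions/s2",
        ]
        with multiprocessing.Pool(processes=2) as pool:
            print(pool.map(fetch_session, names))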
diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst deleted file mode 100644 index 3bbbb55f79..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner v1 API -======================================== -.. toctree:: - :maxdepth: 2 - - spanner diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst deleted file mode 100644 index b51f4447e4..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/spanner.rst +++ /dev/null @@ -1,10 +0,0 @@ -Spanner -------------------------- - -.. automodule:: google.cloud.spanner_v1.services.spanner - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_v1.services.spanner.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst b/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst deleted file mode 100644 index c7ff7e6c71..0000000000 --- a/owl-bot-staging/spanner/v1/docs/spanner_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Spanner v1 API -===================================== - -.. automodule:: google.cloud.spanner_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py deleted file mode 100644 index 4757e26d69..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/__init__.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.spanner_v1.services.spanner.client import SpannerClient -from google.cloud.spanner_v1.services.spanner.async_client import SpannerAsyncClient - -from google.cloud.spanner_v1.types.change_stream import ChangeStreamRecord -from google.cloud.spanner_v1.types.commit_response import CommitResponse -from google.cloud.spanner_v1.types.keys import KeyRange -from google.cloud.spanner_v1.types.keys import KeySet -from google.cloud.spanner_v1.types.mutation import Mutation -from google.cloud.spanner_v1.types.query_plan import PlanNode -from google.cloud.spanner_v1.types.query_plan import QueryPlan -from google.cloud.spanner_v1.types.result_set import PartialResultSet -from google.cloud.spanner_v1.types.result_set import ResultSet -from google.cloud.spanner_v1.types.result_set import ResultSetMetadata -from google.cloud.spanner_v1.types.result_set import ResultSetStats -from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsRequest -from google.cloud.spanner_v1.types.spanner import BatchCreateSessionsResponse -from google.cloud.spanner_v1.types.spanner import BatchWriteRequest -from google.cloud.spanner_v1.types.spanner import BatchWriteResponse -from google.cloud.spanner_v1.types.spanner import BeginTransactionRequest -from google.cloud.spanner_v1.types.spanner import CommitRequest -from google.cloud.spanner_v1.types.spanner import CreateSessionRequest -from google.cloud.spanner_v1.types.spanner import DeleteSessionRequest -from google.cloud.spanner_v1.types.spanner import DirectedReadOptions -from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlRequest -from google.cloud.spanner_v1.types.spanner import ExecuteBatchDmlResponse -from google.cloud.spanner_v1.types.spanner import ExecuteSqlRequest -from google.cloud.spanner_v1.types.spanner import GetSessionRequest -from google.cloud.spanner_v1.types.spanner import ListSessionsRequest -from google.cloud.spanner_v1.types.spanner import ListSessionsResponse -from google.cloud.spanner_v1.types.spanner import Partition -from google.cloud.spanner_v1.types.spanner import PartitionOptions -from google.cloud.spanner_v1.types.spanner import PartitionQueryRequest -from google.cloud.spanner_v1.types.spanner import PartitionReadRequest -from google.cloud.spanner_v1.types.spanner import PartitionResponse -from google.cloud.spanner_v1.types.spanner import ReadRequest -from google.cloud.spanner_v1.types.spanner import RequestOptions -from google.cloud.spanner_v1.types.spanner import RollbackRequest -from google.cloud.spanner_v1.types.spanner import Session -from google.cloud.spanner_v1.types.transaction import MultiplexedSessionPrecommitToken -from google.cloud.spanner_v1.types.transaction import Transaction -from google.cloud.spanner_v1.types.transaction import TransactionOptions -from google.cloud.spanner_v1.types.transaction import TransactionSelector -from google.cloud.spanner_v1.types.type import StructType -from google.cloud.spanner_v1.types.type import Type -from google.cloud.spanner_v1.types.type import TypeAnnotationCode -from google.cloud.spanner_v1.types.type import TypeCode - -__all__ = ('SpannerClient', - 'SpannerAsyncClient', - 'ChangeStreamRecord', - 'CommitResponse', - 'KeyRange', - 'KeySet', - 'Mutation', - 'PlanNode', - 'QueryPlan', - 'PartialResultSet', - 'ResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 
'BatchWriteRequest', - 'BatchWriteResponse', - 'BeginTransactionRequest', - 'CommitRequest', - 'CreateSessionRequest', - 'DeleteSessionRequest', - 'DirectedReadOptions', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'ExecuteSqlRequest', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'Partition', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'PartitionResponse', - 'ReadRequest', - 'RequestOptions', - 'RollbackRequest', - 'Session', - 'MultiplexedSessionPrecommitToken', - 'Transaction', - 'TransactionOptions', - 'TransactionSelector', - 'StructType', - 'Type', - 'TypeAnnotationCode', - 'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed deleted file mode 100644 index 0989eccd04..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py deleted file mode 100644 index 6275c8acfb..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/__init__.py +++ /dev/null @@ -1,114 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.spanner import SpannerClient -from .services.spanner import SpannerAsyncClient - -from .types.change_stream import ChangeStreamRecord -from .types.commit_response import CommitResponse -from .types.keys import KeyRange -from .types.keys import KeySet -from .types.mutation import Mutation -from .types.query_plan import PlanNode -from .types.query_plan import QueryPlan -from .types.result_set import PartialResultSet -from .types.result_set import ResultSet -from .types.result_set import ResultSetMetadata -from .types.result_set import ResultSetStats -from .types.spanner import BatchCreateSessionsRequest -from .types.spanner import BatchCreateSessionsResponse -from .types.spanner import BatchWriteRequest -from .types.spanner import BatchWriteResponse -from .types.spanner import BeginTransactionRequest -from .types.spanner import CommitRequest -from .types.spanner import CreateSessionRequest -from .types.spanner import DeleteSessionRequest -from .types.spanner import DirectedReadOptions -from .types.spanner import ExecuteBatchDmlRequest -from .types.spanner import ExecuteBatchDmlResponse -from .types.spanner import ExecuteSqlRequest -from .types.spanner import GetSessionRequest -from .types.spanner import ListSessionsRequest -from .types.spanner import ListSessionsResponse -from .types.spanner import Partition -from .types.spanner import PartitionOptions -from .types.spanner import PartitionQueryRequest -from .types.spanner import PartitionReadRequest -from .types.spanner import PartitionResponse -from .types.spanner import ReadRequest -from .types.spanner import RequestOptions -from .types.spanner import RollbackRequest -from .types.spanner import Session -from .types.transaction import MultiplexedSessionPrecommitToken -from .types.transaction import Transaction -from .types.transaction import TransactionOptions -from .types.transaction import TransactionSelector -from .types.type import StructType -from .types.type import Type -from .types.type import TypeAnnotationCode -from .types.type import TypeCode - -__all__ = ( - 'SpannerAsyncClient', -'BatchCreateSessionsRequest', -'BatchCreateSessionsResponse', -'BatchWriteRequest', -'BatchWriteResponse', -'BeginTransactionRequest', -'ChangeStreamRecord', -'CommitRequest', -'CommitResponse', -'CreateSessionRequest', -'DeleteSessionRequest', -'DirectedReadOptions', -'ExecuteBatchDmlRequest', -'ExecuteBatchDmlResponse', -'ExecuteSqlRequest', -'GetSessionRequest', -'KeyRange', -'KeySet', -'ListSessionsRequest', -'ListSessionsResponse', -'MultiplexedSessionPrecommitToken', -'Mutation', -'PartialResultSet', -'Partition', -'PartitionOptions', -'PartitionQueryRequest', -'PartitionReadRequest', -'PartitionResponse', -'PlanNode', -'QueryPlan', -'ReadRequest', -'RequestOptions', -'ResultSet', -'ResultSetMetadata', -'ResultSetStats', -'RollbackRequest', -'Session', -'SpannerClient', -'StructType', -'Transaction', -'TransactionOptions', -'TransactionSelector', -'Type', -'TypeAnnotationCode', -'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json deleted file mode 100644 index f5957c633a..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_metadata.json +++ /dev/null @@ -1,268 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - 
"language": "python", - "libraryPackage": "google.cloud.spanner_v1", - "protoPackage": "google.spanner.v1", - "schema": "1.0", - "services": { - "Spanner": { - "clients": { - "grpc": { - "libraryClient": "SpannerClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - }, - "grpc-async": { - "libraryClient": "SpannerAsyncClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - }, - "rest": { - "libraryClient": "SpannerClient", - "rpcs": { - "BatchCreateSessions": { - "methods": [ - "batch_create_sessions" - ] - }, - "BatchWrite": { - "methods": [ - "batch_write" - ] - }, - "BeginTransaction": { - "methods": [ - "begin_transaction" - ] - }, - "Commit": { - "methods": [ - "commit" - ] - }, - "CreateSession": { - "methods": [ - "create_session" - ] - }, - "DeleteSession": { - "methods": [ - "delete_session" - ] - }, - "ExecuteBatchDml": { - "methods": [ - "execute_batch_dml" - ] - }, - "ExecuteSql": { - "methods": [ - "execute_sql" - ] - }, - "ExecuteStreamingSql": { - "methods": [ - "execute_streaming_sql" - ] - }, - "GetSession": { - "methods": [ - "get_session" - ] - }, - "ListSessions": { - "methods": [ - "list_sessions" - ] - }, - "PartitionQuery": { - "methods": [ - "partition_query" - ] - }, - "PartitionRead": { - "methods": [ - "partition_read" - ] - }, - "Read": { - "methods": [ - "read" - ] - }, - "Rollback": { - "methods": [ - "rollback" - ] - }, - "StreamingRead": { - "methods": [ - "streaming_read" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py deleted 
file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed deleted file mode 100644 index 0989eccd04..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner package uses inline types. diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py deleted file mode 100644 index a7de7a7b93..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import SpannerClient -from .async_client import SpannerAsyncClient - -__all__ = ( - 'SpannerClient', - 'SpannerAsyncClient', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py deleted file mode 100644 index e3fe8cabac..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/async_client.py +++ /dev/null @@ -1,2125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, Sequence, Tuple, Type, Union - -from google.cloud.spanner_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport -from .client import SpannerClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class SpannerAsyncClient: - """Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - """ - - _client: SpannerClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
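A minimal end-to-end sketch of the session lifecycle this async client manages (not part of the generated sources; it assumes Application Default Credentials and a hypothetical database path):

.. code-block:: python

    import asyncio

    from google.cloud import spanner_v1


    async def main():
        # Assumes Application Default Credentials are available.
        client = spanner_v1.SpannerAsyncClient()

        # Sessions are the unit of work: create one, issue requests
        # against it, and delete it when finished.
        session = await client.create_session(
            database="projects/my-project/instances/my-instance/databases/my-db",
        )
        try:
            response = await client.execute_sql(
                request=spanner_v1.ExecuteSqlRequest(
                    session=session.name,
                    sql="SELECT 1",
                )
            )
            print(response.rows)
        finally:
            await client.delete_session(name=session.name)


    asyncio.run(main())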
- DEFAULT_ENDPOINT = SpannerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = SpannerClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = SpannerClient._DEFAULT_UNIVERSE - - database_path = staticmethod(SpannerClient.database_path) - parse_database_path = staticmethod(SpannerClient.parse_database_path) - session_path = staticmethod(SpannerClient.session_path) - parse_session_path = staticmethod(SpannerClient.parse_session_path) - common_billing_account_path = staticmethod(SpannerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(SpannerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(SpannerClient.common_folder_path) - parse_common_folder_path = staticmethod(SpannerClient.parse_common_folder_path) - common_organization_path = staticmethod(SpannerClient.common_organization_path) - parse_common_organization_path = staticmethod(SpannerClient.parse_common_organization_path) - common_project_path = staticmethod(SpannerClient.common_project_path) - parse_common_project_path = staticmethod(SpannerClient.parse_common_project_path) - common_location_path = staticmethod(SpannerClient.common_location_path) - parse_common_location_path = staticmethod(SpannerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerAsyncClient: The constructed client. - """ - return SpannerClient.from_service_account_info.__func__(SpannerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerAsyncClient: The constructed client. - """ - return SpannerClient.from_service_account_file.__func__(SpannerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114.
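A short sketch of that resolution order under default settings (no endpoint override, ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` unset); illustrative only, not part of the generated module:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import spanner_v1

    # With no api_endpoint override and no client certificate configured,
    # this resolves to the default regular endpoint and a None cert source.
    endpoint, cert_source = (
        spanner_v1.SpannerAsyncClient.get_mtls_endpoint_and_cert_source(
            ClientOptions()
        )
    )
    print(endpoint)      # spanner.googleapis.com
    print(cert_source)   # None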
- - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return SpannerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> SpannerTransport: - """Returns the transport used by the client instance. - - Returns: - SpannerTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = SpannerClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the spanner async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,SpannerTransport,Callable[..., SpannerTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the SpannerTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which has one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS.
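As a sketch of item 1 above, an explicit ``api_endpoint`` passed through ``client_options`` short-circuits the ``GOOGLE_API_USE_MTLS_ENDPOINT`` resolution (the regional endpoint below is a hypothetical placeholder; anonymous credentials keep the sketch self-contained):

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud import spanner_v1

    client = spanner_v1.SpannerAsyncClient(
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(
            api_endpoint="spanner.example-region.rep.googleapis.com",
        ),
    )
    # The explicit endpoint wins over the environment-driven resolution.
    assert client.api_endpoint == "spanner.example-region.rep.googleapis.com"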
- - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = SpannerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner_v1.SpannerAsyncClient`.", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.spanner.v1.Spanner", - "credentialsType": None, - } - ) - - async def create_session(self, - request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_create_session(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.CreateSessionRequest( - database="database_value", - ) - - # Make the request - response = await client.create_session(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]]): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - database (:class:`str`): - Required.
The database in which the - new session is created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CreateSessionRequest): - request = spanner.CreateSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def batch_create_sessions(self, - request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - session_count: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.BatchCreateSessionsResponse: - r"""Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_batch_create_sessions(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.BatchCreateSessionsRequest( - database="database_value", - session_count=1420, - ) - - # Make the request - response = await client.batch_create_sessions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]]): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - database (:class:`str`): - Required. The database in which the - new sessions are created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - session_count (:class:`int`): - Required. The number of sessions to be created in this - batch call. The API can return fewer than the requested - number of sessions. If a specific number of sessions is - desired, the client can make additional calls to - ``BatchCreateSessions`` (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - - This corresponds to the ``session_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, session_count] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchCreateSessionsRequest): - request = spanner.BatchCreateSessionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if session_count is not None: - request.session_count = session_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_create_sessions] - - # Certain fields should be provided within the metadata header; - # add these here.
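Because ``BatchCreateSessions`` may return fewer sessions than requested, the "adjust ``session_count`` and call again" pattern described above can be sketched like this (illustrative only; the helper name is hypothetical):

.. code-block:: python

    from google.cloud import spanner_v1


    async def create_sessions(
        client: spanner_v1.SpannerAsyncClient, database: str, target: int
    ):
        """Call BatchCreateSessions until `target` sessions exist."""
        sessions = []
        while len(sessions) < target:
            response = await client.batch_create_sessions(
                database=database,
                session_count=target - len(sessions),
            )
            # The `session` field holds the sessions actually created in
            # this batch, possibly fewer than requested.
            sessions.extend(response.session)
        return sessions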
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_session(self, - request: Optional[Union[spanner.GetSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_get_session(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.GetSessionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_session(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]]): - The request object. The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - name (:class:`str`): - Required. The name of the session to - retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.GetSessionRequest): - request = spanner.GetSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_sessions(self, - request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSessionsAsyncPager: - r"""Lists all sessions in a given database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_list_sessions(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ListSessionsRequest( - database="database_value", - ) - - # Make the request - page_result = client.list_sessions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]]): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - database (:class:`str`): - Required. The database in which to - list sessions. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, spanner.ListSessionsRequest): - request = spanner.ListSessionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSessionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_session(self, - request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Ends a session, releasing server resources associated - with it. This asynchronously triggers the cancellation - of any operations that are running with this session. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_delete_session(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.DeleteSessionRequest( - name="name_value", - ) - - # Make the request - await client.delete_session(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]]): - The request object. The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - name (:class:`str`): - Required. The name of the session to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.DeleteSessionRequest): - request = spanner.DeleteSessionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def execute_sql(self, - request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> result_set.ResultSet: - r"""Executes an SQL statement, returning all results in a single - reply. This method can't be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - The query string can be SQL or Graph Query Language - (GQL). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_execute_sql(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ExecuteSqlRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = await client.execute_sql(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.execute_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def execute_streaming_sql(self, - request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: - r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - The query string can be SQL or Graph Query Language - (GQL). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_execute_streaming_sql(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ExecuteSqlRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - stream = await client.execute_streaming_sql(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]]): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.execute_streaming_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def execute_batch_dml(self, - request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_execute_batch_dml(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - statements = spanner_v1.Statement() - statements.sql = "sql_value" - - request = spanner_v1.ExecuteBatchDmlRequest( - session="session_value", - statements=statements, - seqno=550, - ) - - # Make the request - response = await client.execute_batch_dml(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]]): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: - The response for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - Contains a list of - [ResultSet][google.spanner.v1.ResultSet] messages, - one for each DML statement that has successfully - executed, in the same order as the statements in the - request. If a statement fails, the status in the - response body identifies the cause of the failure. - - To check for DML statements that failed, use the - following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value OK - indicates that all statements were executed - successfully. 2. If the status was not OK, check the - number of result sets in the response. If the - response contains N - [ResultSet][google.spanner.v1.ResultSet] messages, - then statement N+1 in the request failed. - - Example 1: - - - Request: 5 DML statements, all executed - successfully. - - \* Response: 5 - [ResultSet][google.spanner.v1.ResultSet] messages, - with the status OK. - - Example 2: - - - Request: 5 DML statements. The third statement has - a syntax error. - - \* Response: 2 - [ResultSet][google.spanner.v1.ResultSet] messages, - and a syntax error (INVALID_ARGUMENT) status. The - number of [ResultSet][google.spanner.v1.ResultSet] - messages indicates that the third statement failed, - and the fourth and fifth statements were not - executed. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteBatchDmlRequest): - request = spanner.ExecuteBatchDmlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.execute_batch_dml] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def read(self, - request: Optional[Union[spanner.ReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> result_set.ResultSet: - r"""Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - can't be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. 
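Both ``ExecuteSql`` above and ``Read`` here note that operations inside read-write transactions may return ``ABORTED``, and that the application should then restart the transaction from the beginning. A sketch of that retry pattern (illustrative only; the helper and its signature are hypothetical):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud import spanner_v1


    async def run_in_transaction(client, session_name, work, max_attempts=5):
        """Re-run `work` in a fresh read-write transaction if it aborts."""
        for _ in range(max_attempts):
            txn = await client.begin_transaction(
                session=session_name,
                options=spanner_v1.TransactionOptions(
                    read_write=spanner_v1.TransactionOptions.ReadWrite(),
                ),
            )
            try:
                result = await work(txn.id)
                await client.commit(
                    request=spanner_v1.CommitRequest(
                        session=session_name,
                        transaction_id=txn.id,
                    )
                )
                return result
            except core_exceptions.Aborted:
                continue  # restart the whole transaction from the beginning
        raise RuntimeError("transaction retries exhausted")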
- If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - response = await client.read(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def streaming_read(self, - request: Optional[Union[spanner.ReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[result_set.PartialResultSet]]: - r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = await client.streaming_read(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.ReadRequest, dict]]): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.streaming_read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def begin_transaction(self, - request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, - *, - session: Optional[str] = None, - options: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transaction.Transaction: - r"""Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_begin_transaction(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.BeginTransactionRequest( - session="session_value", - ) - - # Make the request - response = await client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]]): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - session (:class:`str`): - Required. The session in which the - transaction runs. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - options (:class:`google.cloud.spanner_v1.types.TransactionOptions`): - Required. Options for the new - transaction. - - This corresponds to the ``options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Transaction: - A transaction. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, options] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BeginTransactionRequest): - request = spanner.BeginTransactionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if options is not None: - request.options = options - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.begin_transaction] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
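# The returned Transaction carries the server-assigned transaction id that
# subsequent Read, ExecuteSql, Commit, or Rollback calls reference.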
- return response - - async def commit(self, - request: Optional[Union[spanner.CommitRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - mutations: Optional[MutableSequence[mutation.Mutation]] = None, - single_use_transaction: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> commit_response.CommitResponse: - r"""Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_commit(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.CommitRequest( - transaction_id=b'transaction_id_blob', - session="session_value", - ) - - # Make the request - response = await client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.CommitRequest, dict]]): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - session (:class:`str`): - Required. The session in which the - transaction to be committed is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (:class:`bytes`): - Commit a previously-started - transaction. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (:class:`MutableSequence[google.cloud.spanner_v1.types.Mutation]`): - The mutations to be executed when - this transaction commits. All mutations - are applied atomically, in the order - they appear in this list. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - single_use_transaction (:class:`google.cloud.spanner_v1.types.TransactionOptions`): - Execute mutations in a temporary transaction. Note that - unlike commit of a previously-started transaction, - commit with a temporary transaction is non-idempotent. 
- That is, if the ``CommitRequest`` is sent to Cloud - Spanner more than once (for instance, due to retries in - the application, or in the transport library), it's - possible that the mutations are executed more than once. - If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - - This corresponds to the ``single_use_transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id, mutations, single_use_transaction] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CommitRequest): - request = spanner.CommitRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - if single_use_transaction is not None: - request.single_use_transaction = single_use_transaction - if mutations: - request.mutations.extend(mutations) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rollback(self, - request: Optional[Union[spanner.RollbackRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. 
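Taken together, the ``Commit`` and ``Rollback`` semantics above suggest a loop of roughly this shape on the raw surface. A hedged sketch, assuming a caller-managed session and a plain list of mutations; the helper name ``commit_with_retry`` is an invention for illustration, and the higher-level ``google-cloud-spanner`` package already wraps this pattern:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud import spanner_v1

    async def commit_with_retry(
        client: spanner_v1.SpannerAsyncClient,
        session: str,
        mutations: list,
        max_attempts: int = 5,
    ) -> spanner_v1.CommitResponse:
        # On ABORTED, retry the whole transaction from the beginning in the
        # same session; on any other failure, release locks via Rollback.
        for _ in range(max_attempts):
            txn = await client.begin_transaction(
                session=session,
                options=spanner_v1.TransactionOptions(read_write={}),
            )
            try:
                return await client.commit(
                    session=session,
                    transaction_id=txn.id,
                    mutations=mutations,
                )
            except core_exceptions.Aborted:
                continue  # begin a fresh transaction and try again
            except Exception:
                await client.rollback(session=session, transaction_id=txn.id)
                raise
        raise RuntimeError("transaction aborted too many times")

Note that the loop reuses the session but never the aborted transaction, matching the guidance above.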
- - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_rollback(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - await client.rollback(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.RollbackRequest, dict]]): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. - session (:class:`str`): - Required. The session in which the - transaction to roll back is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (:class:`bytes`): - Required. The transaction to roll - back. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.RollbackRequest): - request = spanner.RollbackRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
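# Rollback returns google.protobuf.Empty, so there is no response object to
# capture; awaiting the call below only surfaces errors.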
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def partition_query(self, - request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - query operation in parallel. Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_partition_query(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionQueryRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = await client.partition_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]]): - The request object. The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.PartitionQueryRequest): - request = spanner.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.partition_query] - - # Certain fields should be provided within the metadata header; - # add these here. 
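# The session name is sent as an x-goog-request-params routing header so the
# backend can route the call to the server that owns the session.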
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def partition_read(self, - request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_partition_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionReadRequest( - session="session_value", - table="table_value", - ) - - # Make the request - response = await client.partition_read(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]]): - The request object. The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
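# PartitionRead exposes no flattened fields, so the request is only
# type-coerced here, never merged with keyword arguments.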
- if not isinstance(request, spanner.PartitionReadRequest): - request = spanner.PartitionReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.partition_read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_write(self, - request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, - *, - session: Optional[str] = None, - mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[spanner.BatchWriteResponse]]: - r"""Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - async def sample_batch_write(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - mutation_groups = spanner_v1.MutationGroup() - mutation_groups.mutations.insert.table = "table_value" - - request = spanner_v1.BatchWriteRequest( - session="session_value", - mutation_groups=mutation_groups, - ) - - # Make the request - stream = await client.batch_write(request=request) - - # Handle the response - async for response in stream: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]]): - The request object. The request for - [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. - session (:class:`str`): - Required. The session in which the - batch request is to be run. 
- - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutation_groups (:class:`MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]`): - Required. The groups of mutations to - be applied. - - This corresponds to the ``mutation_groups`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - AsyncIterable[google.cloud.spanner_v1.types.BatchWriteResponse]: - The result of applying a batch of - mutations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, mutation_groups] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchWriteRequest): - request = spanner.BatchWriteRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if mutation_groups: - request.mutation_groups.extend(mutation_groups) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
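# The response is a server-streaming iterable: BatchWriteResponse messages
# arrive as each group of mutations is applied, not all at once.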
- return response - - async def __aenter__(self) -> "SpannerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "SpannerAsyncClient", -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py deleted file mode 100644 index bd4ac8451f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/client.py +++ /dev/null @@ -1,2486 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.spanner_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import SpannerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import SpannerGrpcTransport -from .transports.grpc_asyncio import SpannerGrpcAsyncIOTransport -from .transports.rest import SpannerRestTransport - - -class SpannerClientMeta(type): - 
"""Metaclass for the Spanner client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] - _transport_registry["grpc"] = SpannerGrpcTransport - _transport_registry["grpc_asyncio"] = SpannerGrpcAsyncIOTransport - _transport_registry["rest"] = SpannerRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[SpannerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class SpannerClient(metaclass=SpannerClientMeta): - """Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SpannerClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SpannerTransport: - """Returns the transport used by the client instance. - - Returns: - SpannerTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def database_path(project: str,instance: str,database: str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def session_path(project: str,instance: str,database: str,session: str,) -> str: - """Returns a fully-qualified session string.""" - return "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) - - @staticmethod - def parse_session_path(path: str) -> Dict[str,str]: - """Parses a session path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)/sessions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a 
location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"].
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = SpannerClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = SpannerClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = SpannerClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. 
- - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SpannerTransport, Callable[..., SpannerTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the spanner client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,SpannerTransport,Callable[..., SpannerTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the SpannerTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. 
- - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = SpannerClient._read_environment_variables() - self._client_cert_source = SpannerClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = SpannerClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, SpannerTransport) - if transport_provided: - # transport is a SpannerTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(SpannerTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - SpannerClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[SpannerTransport], Callable[..., SpannerTransport]] = ( - SpannerClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., SpannerTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner_v1.SpannerClient`.", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.spanner.v1.Spanner", - "credentialsType": None, - } - ) - - def create_session(self, - request: Optional[Union[spanner.CreateSessionRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_create_session(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.CreateSessionRequest( - database="database_value", - ) - - # Make the request - response = client.create_session(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.CreateSessionRequest, dict]): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - database (str): - Required. The database in which the - new session is created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CreateSessionRequest): - request = spanner.CreateSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_create_sessions(self, - request: Optional[Union[spanner.BatchCreateSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - session_count: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.BatchCreateSessionsResponse: - r"""Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_batch_create_sessions(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.BatchCreateSessionsRequest( - database="database_value", - session_count=1420, - ) - - # Make the request - response = client.batch_create_sessions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.BatchCreateSessionsRequest, dict]): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - database (str): - Required. The database in which the - new sessions are created. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - session_count (int): - Required. The number of sessions to be created in this - batch call. The API can return fewer than the requested - number of sessions. If a specific number of sessions are - desired, the client can make additional calls to - ``BatchCreateSessions`` (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - - This corresponds to the ``session_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, session_count] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchCreateSessionsRequest): - request = spanner.BatchCreateSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if session_count is not None: - request.session_count = session_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
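- # NOTE: `_wrapped_methods` (used just below) maps each transport stub to a - # callable that already carries the default retry/timeout policy from the - # service config; that lookup is what resolves the `DEFAULT` sentinels - # passed in `retry` and `timeout`.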
- rpc = self._transport._wrapped_methods[self._transport.batch_create_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_session(self, - request: Optional[Union[spanner.GetSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.Session: - r"""Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_get_session(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.GetSessionRequest( - name="name_value", - ) - - # Make the request - response = client.get_session(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.GetSessionRequest, dict]): - The request object. The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - name (str): - Required. The name of the session to - retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Session: - A session in the Cloud Spanner API. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.GetSessionRequest): - request = spanner.GetSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_sessions(self, - request: Optional[Union[spanner.ListSessionsRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSessionsPager: - r"""Lists all sessions in a given database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_list_sessions(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ListSessionsRequest( - database="database_value", - ) - - # Make the request - page_result = client.list_sessions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ListSessionsRequest, dict]): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - database (str): - Required. The database in which to - list sessions. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
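- # A plain dict is accepted as well and is coerced into the proto below, - # e.g. request={"database": "projects/p/instances/i/databases/d"} - # (illustrative resource name).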
- if not isinstance(request, spanner.ListSessionsRequest): - request = spanner.ListSessionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_sessions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSessionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_session(self, - request: Optional[Union[spanner.DeleteSessionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Ends a session, releasing server resources associated - with it. This asynchronously triggers the cancellation - of any operations that are running with this session. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_delete_session(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.DeleteSessionRequest( - name="name_value", - ) - - # Make the request - client.delete_session(request=request) - - Args: - request (Union[google.cloud.spanner_v1.types.DeleteSessionRequest, dict]): - The request object. The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - name (str): - Required. The name of the session to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
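- # In other words, call either client.delete_session(request=req) or - # client.delete_session(name=...), never both at once.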
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.DeleteSessionRequest): - request = spanner.DeleteSessionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_session] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def execute_sql(self, - request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> result_set.ResultSet: - r"""Executes an SQL statement, returning all results in a single - reply. This method can't be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - The query string can be SQL or Graph Query Language (GQL). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_execute_sql(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ExecuteSqlRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = client.execute_sql(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.execute_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def execute_streaming_sql(self, - request: Optional[Union[spanner.ExecuteSqlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[result_set.PartialResultSet]: - r"""Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - The query string can be SQL or Graph Query Language (GQL). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_execute_streaming_sql(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ExecuteSqlRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - stream = client.execute_streaming_sql(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteSqlRequest, dict]): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`.
- - Returns: - Iterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteSqlRequest): - request = spanner.ExecuteSqlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.execute_streaming_sql] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def execute_batch_dml(self, - request: Optional[Union[spanner.ExecuteBatchDmlRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_execute_batch_dml(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - statements = spanner_v1.Statement() - statements.sql = "sql_value" - - request = spanner_v1.ExecuteBatchDmlRequest( - session="session_value", - statements=statements, - seqno=550, - ) - - # Make the request - response = client.execute_batch_dml(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest, dict]): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.ExecuteBatchDmlResponse: - The response for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - Contains a list of - [ResultSet][google.spanner.v1.ResultSet] messages, - one for each DML statement that has successfully - executed, in the same order as the statements in the - request. If a statement fails, the status in the - response body identifies the cause of the failure. - - To check for DML statements that failed, use the - following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value OK - indicates that all statements were executed - successfully. - 2. If the status was not OK, check the - number of result sets in the response. If the - response contains N - [ResultSet][google.spanner.v1.ResultSet] messages, - then statement N+1 in the request failed. - - Example 1: - - - Request: 5 DML statements, all executed - successfully. - - Response: 5 - [ResultSet][google.spanner.v1.ResultSet] messages, - with the status OK. - - Example 2: - - - Request: 5 DML statements. The third statement has - a syntax error. - - Response: 2 - [ResultSet][google.spanner.v1.ResultSet] messages, - and a syntax error (INVALID_ARGUMENT) status. The - number of [ResultSet][google.spanner.v1.ResultSet] - messages indicates that the third statement failed, - and the fourth and fifth statements were not - executed. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ExecuteBatchDmlRequest): - request = spanner.ExecuteBatchDmlRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.execute_batch_dml] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def read(self, - request: Optional[Union[spanner.ReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> result_set.ResultSet: - r"""Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - can't be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - response = client.read(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def streaming_read(self, - request: Optional[Union[spanner.ReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[result_set.PartialResultSet]: - r"""Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = client.streaming_read(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.ReadRequest, dict]): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.spanner_v1.types.PartialResultSet]: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.ReadRequest): - request = spanner.ReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.streaming_read] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def begin_transaction(self, - request: Optional[Union[spanner.BeginTransactionRequest, dict]] = None, - *, - session: Optional[str] = None, - options: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transaction.Transaction: - r"""Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_begin_transaction(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.BeginTransactionRequest( - session="session_value", - ) - - # Make the request - response = client.begin_transaction(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.BeginTransactionRequest, dict]): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - session (str): - Required. The session in which the - transaction runs. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - options (google.cloud.spanner_v1.types.TransactionOptions): - Required. Options for the new - transaction. - - This corresponds to the ``options`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.Transaction: - A transaction. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, options] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BeginTransactionRequest): - request = spanner.BeginTransactionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if options is not None: - request.options = options - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.begin_transaction] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
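- # Illustrative sketch (not executed here): callers typically reference the - # returned transaction on follow-up calls through a TransactionSelector, - # for example: - # txn = client.begin_transaction(request={"session": session_name, - # "options": {"read_write": {}}}) - # client.execute_sql(request={"session": session_name, "sql": "SELECT 1", - # "transaction": {"id": txn.id}}) - # where `session_name` is a hypothetical session resource name.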
- return response - - def commit(self, - request: Optional[Union[spanner.CommitRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - mutations: Optional[MutableSequence[mutation.Mutation]] = None, - single_use_transaction: Optional[transaction.TransactionOptions] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> commit_response.CommitResponse: - r"""Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_commit(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.CommitRequest( - transaction_id=b'transaction_id_blob', - session="session_value", - ) - - # Make the request - response = client.commit(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.CommitRequest, dict]): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - session (str): - Required. The session in which the - transaction to be committed is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (bytes): - Commit a previously-started - transaction. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): - The mutations to be executed when - this transaction commits. All mutations - are applied atomically, in the order - they appear in this list. - - This corresponds to the ``mutations`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): - Execute mutations in a temporary transaction. Note that - unlike commit of a previously-started transaction, - commit with a temporary transaction is non-idempotent. - That is, if the ``CommitRequest`` is sent to Cloud - Spanner more than once (for instance, due to retries in - the application, or in the transport library), it's - possible that the mutations are executed more than once. 
- If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - - This corresponds to the ``single_use_transaction`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id, mutations, single_use_transaction] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.CommitRequest): - request = spanner.CommitRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - if mutations is not None: - request.mutations = mutations - if single_use_transaction is not None: - request.single_use_transaction = single_use_transaction - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.commit] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rollback(self, - request: Optional[Union[spanner.RollbackRequest, dict]] = None, - *, - session: Optional[str] = None, - transaction_id: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_rollback(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - client.rollback(request=request) - - Args: - request (Union[google.cloud.spanner_v1.types.RollbackRequest, dict]): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. - session (str): - Required. The session in which the - transaction to roll back is running. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transaction_id (bytes): - Required. The transaction to roll - back. - - This corresponds to the ``transaction_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, transaction_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.RollbackRequest): - request = spanner.RollbackRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if transaction_id is not None: - request.transaction_id = transaction_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rollback] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def partition_query(self, - request: Optional[Union[spanner.PartitionQueryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - query operation in parallel. 
Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_partition_query(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionQueryRequest( - session="session_value", - sql="sql_value", - ) - - # Make the request - response = client.partition_query(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionQueryRequest, dict]): - The request object. The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.PartitionQueryRequest): - request = spanner.PartitionQueryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.partition_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
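- # Illustrative sketch (not executed here): each returned token is fed back - # on the same session and read-only transaction, for example: - # for partition in response.partitions: - # for chunk in client.execute_streaming_sql(request={ - # "session": request.session, - # "sql": request.sql, - # "transaction": request.transaction, - # "partition_token": partition.partition_token}): - # ... # consume PartialResultSet chunks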
- return response - - def partition_read(self, - request: Optional[Union[spanner.PartitionReadRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner.PartitionResponse: - r"""Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_partition_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.PartitionReadRequest( - session="session_value", - table="table_value", - ) - - # Make the request - response = client.partition_read(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.PartitionReadRequest, dict]): - The request object. The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_v1.types.PartitionResponse: - The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.PartitionReadRequest): - request = spanner.PartitionReadRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.partition_read] - - # Certain fields should be provided within the metadata header; - # add these here. 
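-        # to_grpc_metadata() emits the ``x-goog-request-params`` routing
-        # header keyed on the session name, which the service uses to route
-        # the call to the correct backend.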
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_write(self, - request: Optional[Union[spanner.BatchWriteRequest, dict]] = None, - *, - session: Optional[str] = None, - mutation_groups: Optional[MutableSequence[spanner.BatchWriteRequest.MutationGroup]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[spanner.BatchWriteResponse]: - r"""Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_v1 - - def sample_batch_write(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - mutation_groups = spanner_v1.MutationGroup() - mutation_groups.mutations.insert.table = "table_value" - - request = spanner_v1.BatchWriteRequest( - session="session_value", - mutation_groups=mutation_groups, - ) - - # Make the request - stream = client.batch_write(request=request) - - # Handle the response - for response in stream: - print(response) - - Args: - request (Union[google.cloud.spanner_v1.types.BatchWriteRequest, dict]): - The request object. The request for - [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. - session (str): - Required. The session in which the - batch request is to be run. - - This corresponds to the ``session`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): - Required. The groups of mutations to - be applied. - - This corresponds to the ``mutation_groups`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]: - The result of applying a batch of - mutations. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [session, mutation_groups] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner.BatchWriteRequest): - request = spanner.BatchWriteRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if session is not None: - request.session = session - if mutation_groups is not None: - request.mutation_groups = mutation_groups - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_write] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("session", request.session), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "SpannerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "SpannerClient", -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py deleted file mode 100644 index 0969af8f6f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_v1.types import spanner - - -class ListSessionsPager: - """A pager for iterating through ``list_sessions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner.ListSessionsResponse], - request: spanner.ListSessionsRequest, - response: spanner.ListSessionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.spanner_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = spanner.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner.Session]: - for page in self.pages: - yield from page.sessions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSessionsAsyncPager: - """A pager for iterating through ``list_sessions`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.spanner_v1.types.ListSessionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``sessions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSessions`` requests and continue to iterate - through the ``sessions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_v1.types.ListSessionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner.ListSessionsResponse]], - request: spanner.ListSessionsRequest, - response: spanner.ListSessionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_v1.types.ListSessionsRequest): - The initial request object. - response (google.cloud.spanner_v1.types.ListSessionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = spanner.ListSessionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner.ListSessionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner.Session]: - async def async_generator(): - async for page in self.pages: - for response in page.sessions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst deleted file mode 100644 index 99997401d5..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`SpannerTransport` is the ABC for all transports. -- public child `SpannerGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `SpannerGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseSpannerRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
-- public child `SpannerRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py deleted file mode 100644 index ee0b1ae37f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import SpannerTransport -from .grpc import SpannerGrpcTransport -from .grpc_asyncio import SpannerGrpcAsyncIOTransport -from .rest import SpannerRestTransport -from .rest import SpannerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[SpannerTransport]] -_transport_registry['grpc'] = SpannerGrpcTransport -_transport_registry['grpc_asyncio'] = SpannerGrpcAsyncIOTransport -_transport_registry['rest'] = SpannerRestTransport - -__all__ = ( - 'SpannerTransport', - 'SpannerGrpcTransport', - 'SpannerGrpcAsyncIOTransport', - 'SpannerRestTransport', - 'SpannerRestInterceptor', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py deleted file mode 100644 index 0c7b14364b..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/base.py +++ /dev/null @@ -1,505 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.spanner_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class SpannerTransport(abc.ABC): - """Abstract transport class for Spanner.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
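-        # Precedence: an explicit ``credentials`` object wins; a (deprecated)
-        # ``credentials_file`` is loaded next; otherwise Application Default
-        # Credentials are resolved via google.auth.default().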
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_session: gapic_v1.method.wrap_method( - self.create_session, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_create_sessions: gapic_v1.method.wrap_method( - self.batch_create_sessions, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_session: gapic_v1.method.wrap_method( - self.get_session, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_sessions: gapic_v1.method.wrap_method( - self.list_sessions, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_session: gapic_v1.method.wrap_method( - self.delete_session, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_sql: gapic_v1.method.wrap_method( - self.execute_sql, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_streaming_sql: gapic_v1.method.wrap_method( - 
self.execute_streaming_sql, - default_timeout=3600.0, - client_info=client_info, - ), - self.execute_batch_dml: gapic_v1.method.wrap_method( - self.execute_batch_dml, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.read: gapic_v1.method.wrap_method( - self.read, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.streaming_read: gapic_v1.method.wrap_method( - self.streaming_read, - default_timeout=3600.0, - client_info=client_info, - ), - self.begin_transaction: gapic_v1.method.wrap_method( - self.begin_transaction, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.commit: gapic_v1.method.wrap_method( - self.commit, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.rollback: gapic_v1.method.wrap_method( - self.rollback, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_query: gapic_v1.method.wrap_method( - self.partition_query, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_read: gapic_v1.method.wrap_method( - self.partition_read, - default_retry=retries.Retry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_write: gapic_v1.method.wrap_method( - self.batch_write, - default_timeout=3600.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - Union[ - spanner.Session, - Awaitable[spanner.Session] - ]]: - raise NotImplementedError() - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - Union[ - spanner.BatchCreateSessionsResponse, - Awaitable[spanner.BatchCreateSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - Union[ - spanner.Session, - Awaitable[spanner.Session] - ]]: - raise NotImplementedError() - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - Union[ - spanner.ListSessionsResponse, - Awaitable[spanner.ListSessionsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Union[ - result_set.ResultSet, - Awaitable[result_set.ResultSet] - ]]: - raise NotImplementedError() - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Union[ - result_set.PartialResultSet, - Awaitable[result_set.PartialResultSet] - ]]: - raise NotImplementedError() - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - Union[ - spanner.ExecuteBatchDmlResponse, - Awaitable[spanner.ExecuteBatchDmlResponse] - ]]: - raise NotImplementedError() - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - Union[ - result_set.ResultSet, - Awaitable[result_set.ResultSet] - ]]: - raise NotImplementedError() - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - Union[ - result_set.PartialResultSet, - Awaitable[result_set.PartialResultSet] - ]]: - raise NotImplementedError() - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - Union[ - transaction.Transaction, - Awaitable[transaction.Transaction] - ]]: - raise NotImplementedError() - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - Union[ - commit_response.CommitResponse, - Awaitable[commit_response.CommitResponse] - ]]: - raise NotImplementedError() - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - Union[ - spanner.PartitionResponse, - Awaitable[spanner.PartitionResponse] - ]]: - raise NotImplementedError() - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - Union[ - spanner.PartitionResponse, - Awaitable[spanner.PartitionResponse] - ]]: - raise NotImplementedError() - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - Union[ - spanner.BatchWriteResponse, - Awaitable[spanner.BatchWriteResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'SpannerTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py deleted file mode 100644 index 064199f57e..0000000000 --- 
a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc.py +++ /dev/null @@ -1,899 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore -from .base import SpannerTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: 
{pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class SpannerGrpcTransport(SpannerTransport): - """gRPC backend transport for Spanner. - - Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. 
It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - spanner.Session]: - r"""Return a callable for the create session method over gRPC. - - Creates a new session. A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - Returns: - Callable[[~.CreateSessionRequest], - ~.Session]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
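-        # The stub is created lazily on first access and cached in
-        # self._stubs, so later property reads reuse the same callable.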
- if 'create_session' not in self._stubs: - self._stubs['create_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/CreateSession', - request_serializer=spanner.CreateSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['create_session'] - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - spanner.BatchCreateSessionsResponse]: - r"""Return a callable for the batch create sessions method over gRPC. - - Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Returns: - Callable[[~.BatchCreateSessionsRequest], - ~.BatchCreateSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_sessions' not in self._stubs: - self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BatchCreateSessions', - request_serializer=spanner.BatchCreateSessionsRequest.serialize, - response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, - ) - return self._stubs['batch_create_sessions'] - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - spanner.Session]: - r"""Return a callable for the get session method over gRPC. - - Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - Returns: - Callable[[~.GetSessionRequest], - ~.Session]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_session' not in self._stubs: - self._stubs['get_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/GetSession', - request_serializer=spanner.GetSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['get_session'] - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - spanner.ListSessionsResponse]: - r"""Return a callable for the list sessions method over gRPC. - - Lists all sessions in a given database. - - Returns: - Callable[[~.ListSessionsRequest], - ~.ListSessionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ListSessions', - request_serializer=spanner.ListSessionsRequest.serialize, - response_deserializer=spanner.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete session method over gRPC. - - Ends a session, releasing server resources associated - with it. 
This asynchronously triggers the cancellation - of any operations that are running with this session. - - Returns: - Callable[[~.DeleteSessionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_session' not in self._stubs: - self._stubs['delete_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/DeleteSession', - request_serializer=spanner.DeleteSessionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_session'] - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.ResultSet]: - r"""Return a callable for the execute sql method over gRPC. - - Executes an SQL statement, returning all results in a single - reply. This method can't be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - The query string can be SQL or `Graph Query Language - (GQL) `__. - - Returns: - Callable[[~.ExecuteSqlRequest], - ~.ResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_sql' not in self._stubs: - self._stubs['execute_sql'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['execute_sql'] - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.PartialResultSet]: - r"""Return a callable for the execute streaming sql method over gRPC. - - Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - The query string can be SQL or `Graph Query Language - (GQL) `__. - - Returns: - Callable[[~.ExecuteSqlRequest], - ~.PartialResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
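-        # Note: unary_stream (not unary_unary), since the server replies with
-        # a stream of PartialResultSet messages.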
- if 'execute_streaming_sql' not in self._stubs: - self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/ExecuteStreamingSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['execute_streaming_sql'] - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - spanner.ExecuteBatchDmlResponse]: - r"""Return a callable for the execute batch dml method over gRPC. - - Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Returns: - Callable[[~.ExecuteBatchDmlRequest], - ~.ExecuteBatchDmlResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_batch_dml' not in self._stubs: - self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteBatchDml', - request_serializer=spanner.ExecuteBatchDmlRequest.serialize, - response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, - ) - return self._stubs['execute_batch_dml'] - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - result_set.ResultSet]: - r"""Return a callable for the read method over gRPC. - - Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method - can't be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Returns: - Callable[[~.ReadRequest], - ~.ResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'read' not in self._stubs: - self._stubs['read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Read', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['read'] - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - result_set.PartialResultSet]: - r"""Return a callable for the streaming read method over gRPC. 
- - Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Returns: - Callable[[~.ReadRequest], - ~.PartialResultSet]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'streaming_read' not in self._stubs: - self._stubs['streaming_read'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/StreamingRead', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['streaming_read'] - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - transaction.Transaction]: - r"""Return a callable for the begin transaction method over gRPC. - - Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Returns: - Callable[[~.BeginTransactionRequest], - ~.Transaction]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BeginTransaction', - request_serializer=spanner.BeginTransactionRequest.serialize, - response_deserializer=transaction.Transaction.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - commit_response.CommitResponse]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Returns: - Callable[[~.CommitRequest], - ~.CommitResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
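A sketch of the retry-from-the-beginning pattern that the ``Commit`` docstring above calls for, assuming a synchronous ``transport`` and caller-supplied ``session_name`` and ``mutations``:

.. code-block:: python

    import grpc

    from google.cloud.spanner_v1.types import spanner, transaction

    def commit_with_retry(transport, session_name, mutations):
        """Retry the whole transaction whenever Commit returns ABORTED."""
        while True:
            begun = transport.begin_transaction(spanner.BeginTransactionRequest(
                session=session_name,
                options=transaction.TransactionOptions(read_write={}),
            ))
            try:
                return transport.commit(spanner.CommitRequest(
                    session=session_name,
                    transaction_id=begun.id,
                    mutations=mutations,
                ))
            except grpc.RpcError as exc:
                if exc.code() != grpc.StatusCode.ABORTED:
                    raise
                # ABORTED: restart from the beginning, reusing the same
                # session, as the docstring recommends.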
- if 'commit' not in self._stubs: - self._stubs['commit'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Commit', - request_serializer=spanner.CommitRequest.serialize, - response_deserializer=commit_response.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - empty_pb2.Empty]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - Returns: - Callable[[~.RollbackRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Rollback', - request_serializer=spanner.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - spanner.PartitionResponse]: - r"""Return a callable for the partition query method over gRPC. - - Creates a set of partition tokens that can be used to execute a - query operation in parallel. Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionQueryRequest], - ~.PartitionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionQuery', - request_serializer=spanner.PartitionQueryRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - spanner.PartitionResponse]: - r"""Return a callable for the partition read method over gRPC. - - Creates a set of partition tokens that can be used to execute a - read operation in parallel. 
Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionReadRequest], - ~.PartitionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_read' not in self._stubs: - self._stubs['partition_read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionRead', - request_serializer=spanner.PartitionReadRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_read'] - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - spanner.BatchWriteResponse]: - r"""Return a callable for the batch write method over gRPC. - - Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - Returns: - Callable[[~.BatchWriteRequest], - ~.BatchWriteResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
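A sketch of consuming the ``BatchWrite`` response stream, assuming a ``transport`` instance and a caller-built list ``mutation_groups`` of ``BatchWriteRequest.MutationGroup`` messages:

.. code-block:: python

    from google.cloud.spanner_v1.types import spanner

    request = spanner.BatchWriteRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
        mutation_groups=mutation_groups,  # assumed to be built by the caller
    )
    # unary_stream: calling the stub yields one response per applied batch.
    for response in transport.batch_write(request):
        if response.status.code == 0:  # google.rpc.Code.OK
            print("groups", list(response.indexes),
                  "committed at", response.commit_timestamp)
        else:
            print("groups", list(response.indexes),
                  "failed:", response.status.message)

Because groups can be applied more than once, the docstring's advice to keep each group idempotent applies to whatever ``mutation_groups`` contains.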
- if 'batch_write' not in self._stubs: - self._stubs['batch_write'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/BatchWrite', - request_serializer=spanner.BatchWriteRequest.serialize, - response_deserializer=spanner.BatchWriteResponse.deserialize, - ) - return self._stubs['batch_write'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'SpannerGrpcTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py deleted file mode 100644 index 3efcf7071f..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py +++ /dev/null @@ -1,1125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore -from .base import SpannerTransport, DEFAULT_CLIENT_INFO -from .grpc import SpannerGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in 
request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class SpannerGrpcAsyncIOTransport(SpannerTransport): - """gRPC AsyncIO backend transport for Spanner. - - Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - Awaitable[spanner.Session]]: - r"""Return a callable for the create session method over gRPC. - - Creates a new session. 
A session can be used to perform - transactions that read and/or modify data in a Cloud Spanner - database. Sessions are meant to be reused for many consecutive - transactions. - - Sessions can only execute one transaction at a time. To execute - multiple concurrent read-write/write-only transactions, create - multiple sessions. Note that standalone reads and queries use a - transaction internally, and count toward the one transaction - limit. - - Active sessions use additional server resources, so it's a good - idea to delete idle and unneeded sessions. Aside from explicit - deletes, Cloud Spanner can delete sessions when no operations - are sent for more than an hour. If a session is deleted, - requests to it return ``NOT_FOUND``. - - Idle sessions can be kept alive by sending a trivial SQL query - periodically, for example, ``"SELECT 1"``. - - Returns: - Callable[[~.CreateSessionRequest], - Awaitable[~.Session]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_session' not in self._stubs: - self._stubs['create_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/CreateSession', - request_serializer=spanner.CreateSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['create_session'] - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - Awaitable[spanner.BatchCreateSessionsResponse]]: - r"""Return a callable for the batch create sessions method over gRPC. - - Creates multiple new sessions. - - This API can be used to initialize a session cache on - the clients. See https://goo.gl/TgSFN2 for best - practices on session cache management. - - Returns: - Callable[[~.BatchCreateSessionsRequest], - Awaitable[~.BatchCreateSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_create_sessions' not in self._stubs: - self._stubs['batch_create_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BatchCreateSessions', - request_serializer=spanner.BatchCreateSessionsRequest.serialize, - response_deserializer=spanner.BatchCreateSessionsResponse.deserialize, - ) - return self._stubs['batch_create_sessions'] - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - Awaitable[spanner.Session]]: - r"""Return a callable for the get session method over gRPC. - - Gets a session. Returns ``NOT_FOUND`` if the session doesn't - exist. This is mainly useful for determining whether a session - is still alive. - - Returns: - Callable[[~.GetSessionRequest], - Awaitable[~.Session]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
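A sketch of the session lifecycle described above on the AsyncIO transport, where stub calls return awaitables; the ``transport`` and ``database`` names are assumptions:

.. code-block:: python

    from google.cloud.spanner_v1.types import spanner

    async def make_session(transport, database):
        # AsyncIO stubs return awaitable call objects.
        session = await transport.create_session(
            spanner.CreateSessionRequest(database=database)
        )
        # Keep an idle session alive with a trivial query, as suggested above.
        await transport.execute_sql(
            spanner.ExecuteSqlRequest(session=session.name, sql="SELECT 1")
        )
        return session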
- if 'get_session' not in self._stubs: - self._stubs['get_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/GetSession', - request_serializer=spanner.GetSessionRequest.serialize, - response_deserializer=spanner.Session.deserialize, - ) - return self._stubs['get_session'] - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - Awaitable[spanner.ListSessionsResponse]]: - r"""Return a callable for the list sessions method over gRPC. - - Lists all sessions in a given database. - - Returns: - Callable[[~.ListSessionsRequest], - Awaitable[~.ListSessionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_sessions' not in self._stubs: - self._stubs['list_sessions'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ListSessions', - request_serializer=spanner.ListSessionsRequest.serialize, - response_deserializer=spanner.ListSessionsResponse.deserialize, - ) - return self._stubs['list_sessions'] - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete session method over gRPC. - - Ends a session, releasing server resources associated - with it. This asynchronously triggers the cancellation - of any operations that are running with this session. - - Returns: - Callable[[~.DeleteSessionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_session' not in self._stubs: - self._stubs['delete_session'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/DeleteSession', - request_serializer=spanner.DeleteSessionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_session'] - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Awaitable[result_set.ResultSet]]: - r"""Return a callable for the execute sql method over gRPC. - - Executes an SQL statement, returning all results in a single - reply. This method can't be used to return a result set larger - than 10 MiB; if the query yields more data than that, the query - fails with a ``FAILED_PRECONDITION`` error. - - Operations inside read-write transactions might return - ``ABORTED``. If this occurs, the application should restart the - transaction from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be fetched in streaming fashion by - calling - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - instead. - - The query string can be SQL or `Graph Query Language - (GQL) `__. - - Returns: - Callable[[~.ExecuteSqlRequest], - Awaitable[~.ResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
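At the transport level there is no pager, so ``list_sessions`` returns one page at a time; a sketch of manual pagination with ``next_page_token``, assuming ``transport`` and ``database``:

.. code-block:: python

    from google.cloud.spanner_v1.types import spanner

    async def all_sessions(transport, database):
        sessions, page_token = [], ""
        while True:
            response = await transport.list_sessions(
                spanner.ListSessionsRequest(database=database,
                                            page_token=page_token)
            )
            sessions.extend(response.sessions)
            page_token = response.next_page_token
            if not page_token:
                return sessions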
- if 'execute_sql' not in self._stubs: - self._stubs['execute_sql'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['execute_sql'] - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - Awaitable[result_set.PartialResultSet]]: - r"""Return a callable for the execute streaming sql method over gRPC. - - Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except - returns the result set as a stream. Unlike - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no - limit on the size of the returned result set. However, no - individual row in the result set can exceed 100 MiB, and no - column value can exceed 10 MiB. - - The query string can be SQL or `Graph Query Language - (GQL) `__. - - Returns: - Callable[[~.ExecuteSqlRequest], - Awaitable[~.PartialResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_streaming_sql' not in self._stubs: - self._stubs['execute_streaming_sql'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/ExecuteStreamingSql', - request_serializer=spanner.ExecuteSqlRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['execute_streaming_sql'] - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - Awaitable[spanner.ExecuteBatchDmlResponse]]: - r"""Return a callable for the execute batch dml method over gRPC. - - Executes a batch of SQL DML statements. This method allows many - statements to be run with lower latency than submitting them - sequentially with - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Statements are executed in sequential order. A request can - succeed even if a statement fails. The - [ExecuteBatchDmlResponse.status][google.spanner.v1.ExecuteBatchDmlResponse.status] - field in the response provides information about the statement - that failed. Clients must inspect this field to determine - whether an error occurred. - - Execution stops after the first failed statement; the remaining - statements are not executed. - - Returns: - Callable[[~.ExecuteBatchDmlRequest], - Awaitable[~.ExecuteBatchDmlResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'execute_batch_dml' not in self._stubs: - self._stubs['execute_batch_dml'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/ExecuteBatchDml', - request_serializer=spanner.ExecuteBatchDmlRequest.serialize, - response_deserializer=spanner.ExecuteBatchDmlResponse.deserialize, - ) - return self._stubs['execute_batch_dml'] - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - Awaitable[result_set.ResultSet]]: - r"""Return a callable for the read method over gRPC. - - Reads rows from the database using key lookups and scans, as a - simple key/value style alternative to - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
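A sketch of the status inspection that the ``ExecuteBatchDml`` docstring above requires; because execution stops at the first failure, the index of the failed statement equals the number of returned result sets:

.. code-block:: python

    from google.cloud.spanner_v1.types import spanner

    async def run_batch_dml(transport, request: spanner.ExecuteBatchDmlRequest):
        response = await transport.execute_batch_dml(request)
        # One ResultSet per statement that ran successfully, in order.
        executed = len(response.result_sets)
        if response.status.code != 0:  # google.rpc.Code.OK
            # The statement at index `executed` failed; later statements
            # were not executed at all.
            raise RuntimeError(
                f"statement {executed} failed: {response.status.message}"
            )
        return response.result_sets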
This method - can't be used to return a result set larger than 10 MiB; if the - read matches more data than that, the read fails with a - ``FAILED_PRECONDITION`` error. - - Reads inside read-write transactions might return ``ABORTED``. - If this occurs, the application should restart the transaction - from the beginning. See - [Transaction][google.spanner.v1.Transaction] for more details. - - Larger result sets can be yielded in streaming fashion by - calling [StreamingRead][google.spanner.v1.Spanner.StreamingRead] - instead. - - Returns: - Callable[[~.ReadRequest], - Awaitable[~.ResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'read' not in self._stubs: - self._stubs['read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Read', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.ResultSet.deserialize, - ) - return self._stubs['read'] - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - Awaitable[result_set.PartialResultSet]]: - r"""Return a callable for the streaming read method over gRPC. - - Like [Read][google.spanner.v1.Spanner.Read], except returns the - result set as a stream. Unlike - [Read][google.spanner.v1.Spanner.Read], there is no limit on the - size of the returned result set. However, no individual row in - the result set can exceed 100 MiB, and no column value can - exceed 10 MiB. - - Returns: - Callable[[~.ReadRequest], - Awaitable[~.PartialResultSet]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'streaming_read' not in self._stubs: - self._stubs['streaming_read'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/StreamingRead', - request_serializer=spanner.ReadRequest.serialize, - response_deserializer=result_set.PartialResultSet.deserialize, - ) - return self._stubs['streaming_read'] - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - Awaitable[transaction.Transaction]]: - r"""Return a callable for the begin transaction method over gRPC. - - Begins a new transaction. This step can often be skipped: - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [Commit][google.spanner.v1.Spanner.Commit] can begin a new - transaction as a side-effect. - - Returns: - Callable[[~.BeginTransactionRequest], - Awaitable[~.Transaction]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
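A sketch of draining the async ``streaming_read`` stub, whose call object supports ``async for`` (``transport`` is assumed; real code must also stitch together values split across chunks):

.. code-block:: python

    from google.cloud.spanner_v1.types import spanner

    async def stream_rows(transport, request: spanner.ReadRequest):
        values = []
        # aio unary_stream calls are async iterables of PartialResultSet
        # chunks.
        async for partial in transport.streaming_read(request):
            # `chunked_value` marks a value split across two chunks; a full
            # implementation merges it with the next chunk's first value.
            values.extend(partial.values)
        return values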
- if 'begin_transaction' not in self._stubs: - self._stubs['begin_transaction'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/BeginTransaction', - request_serializer=spanner.BeginTransactionRequest.serialize, - response_deserializer=transaction.Transaction.deserialize, - ) - return self._stubs['begin_transaction'] - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - Awaitable[commit_response.CommitResponse]]: - r"""Return a callable for the commit method over gRPC. - - Commits a transaction. The request includes the mutations to be - applied to rows in the database. - - ``Commit`` might return an ``ABORTED`` error. This can occur at - any time; commonly, the cause is conflicts with concurrent - transactions. However, it can also happen for a variety of other - reasons. If ``Commit`` returns ``ABORTED``, the caller should - retry the transaction from the beginning, reusing the same - session. - - On very rare occasions, ``Commit`` might return ``UNKNOWN``. - This can happen, for example, if the client job experiences a 1+ - hour networking failure. At that point, Cloud Spanner has lost - track of the transaction outcome and we recommend that you - perform another read from the database to see the state of - things as they are now. - - Returns: - Callable[[~.CommitRequest], - Awaitable[~.CommitResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'commit' not in self._stubs: - self._stubs['commit'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Commit', - request_serializer=spanner.CommitRequest.serialize, - response_deserializer=commit_response.CommitResponse.deserialize, - ) - return self._stubs['commit'] - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the rollback method over gRPC. - - Rolls back a transaction, releasing any locks it holds. It's a - good idea to call this for any transaction that includes one or - more [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and - ultimately decides not to commit. - - ``Rollback`` returns ``OK`` if it successfully aborts the - transaction, the transaction was already aborted, or the - transaction isn't found. ``Rollback`` never returns ``ABORTED``. - - Returns: - Callable[[~.RollbackRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rollback' not in self._stubs: - self._stubs['rollback'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/Rollback', - request_serializer=spanner.RollbackRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['rollback'] - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - Awaitable[spanner.PartitionResponse]]: - r"""Return a callable for the partition query method over gRPC. - - Creates a set of partition tokens that can be used to execute a - query operation in parallel. 
Each of the returned partition - tokens can be used by - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] - to specify a subset of the query result to read. The same - session and read-only transaction must be used by the - ``PartitionQueryRequest`` used to create the partition tokens - and the ``ExecuteSqlRequests`` that use the partition tokens. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the query, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionQueryRequest], - Awaitable[~.PartitionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_query' not in self._stubs: - self._stubs['partition_query'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionQuery', - request_serializer=spanner.PartitionQueryRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_query'] - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - Awaitable[spanner.PartitionResponse]]: - r"""Return a callable for the partition read method over gRPC. - - Creates a set of partition tokens that can be used to execute a - read operation in parallel. Each of the returned partition - tokens can be used by - [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to - specify a subset of the read result to read. The same session - and read-only transaction must be used by the - ``PartitionReadRequest`` used to create the partition tokens and - the ``ReadRequests`` that use the partition tokens. There are no - ordering guarantees on rows returned among the returned - partition tokens, or even within each individual - ``StreamingRead`` call issued with a ``partition_token``. - - Partition tokens become invalid when the session used to create - them is deleted, is idle for too long, begins a new transaction, - or becomes too old. When any of these happen, it isn't possible - to resume the read, and the whole operation must be restarted - from the beginning. - - Returns: - Callable[[~.PartitionReadRequest], - Awaitable[~.PartitionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'partition_read' not in self._stubs: - self._stubs['partition_read'] = self._logged_channel.unary_unary( - '/google.spanner.v1.Spanner/PartitionRead', - request_serializer=spanner.PartitionReadRequest.serialize, - response_deserializer=spanner.PartitionResponse.deserialize, - ) - return self._stubs['partition_read'] - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - Awaitable[spanner.BatchWriteResponse]]: - r"""Return a callable for the batch write method over gRPC. - - Batches the supplied mutation groups in a collection of - efficient transactions. All mutations in a group are committed - atomically. 
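A sketch of the partitioned fan-out described above, combining ``partition_query`` with ``execute_streaming_sql``; the ``session``, ``transaction_id``, and ``sql`` arguments are assumptions supplied by the caller, and the same session and read-only transaction are reused for every token, as required:

.. code-block:: python

    from google.cloud.spanner_v1.types import spanner, transaction

    async def partitioned_query(transport, session, transaction_id, sql):
        selector = transaction.TransactionSelector(id=transaction_id)
        response = await transport.partition_query(spanner.PartitionQueryRequest(
            session=session, transaction=selector, sql=sql,
        ))
        for partition in response.partitions:
            # Each token selects a disjoint subset of the query result.
            stream = transport.execute_streaming_sql(spanner.ExecuteSqlRequest(
                session=session,
                transaction=selector,
                sql=sql,
                partition_token=partition.partition_token,
            ))
            async for chunk in stream:
                ...  # consume PartialResultSet chunks, possibly concurrently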
However, mutations across groups can be committed - non-atomically in an unspecified order and thus, they must be - independent of each other. Partial failure is possible, that is, - some groups might have been committed successfully, while some - might have failed. The results of individual batches are - streamed into the response as the batches are applied. - - ``BatchWrite`` requests are not replay protected, meaning that - each mutation group can be applied more than once. Replays of - non-idempotent mutations can have undesirable effects. For - example, replays of an insert mutation can produce an already - exists error or if you use generated or commit timestamp-based - keys, it can result in additional rows being added to the - mutation's table. We recommend structuring your mutation groups - to be idempotent to avoid this issue. - - Returns: - Callable[[~.BatchWriteRequest], - Awaitable[~.BatchWriteResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_write' not in self._stubs: - self._stubs['batch_write'] = self._logged_channel.unary_stream( - '/google.spanner.v1.Spanner/BatchWrite', - request_serializer=spanner.BatchWriteRequest.serialize, - response_deserializer=spanner.BatchWriteResponse.deserialize, - ) - return self._stubs['batch_write'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_session: self._wrap_method( - self.create_session, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_create_sessions: self._wrap_method( - self.batch_create_sessions, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_session: self._wrap_method( - self.get_session, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.list_sessions: self._wrap_method( - self.list_sessions, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_session: self._wrap_method( - self.delete_session, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_sql: self._wrap_method( - self.execute_sql, - default_retry=retries.AsyncRetry( - initial=0.25, 
- maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.execute_streaming_sql: self._wrap_method( - self.execute_streaming_sql, - default_timeout=3600.0, - client_info=client_info, - ), - self.execute_batch_dml: self._wrap_method( - self.execute_batch_dml, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.read: self._wrap_method( - self.read, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.streaming_read: self._wrap_method( - self.streaming_read, - default_timeout=3600.0, - client_info=client_info, - ), - self.begin_transaction: self._wrap_method( - self.begin_transaction, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.commit: self._wrap_method( - self.commit, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.rollback: self._wrap_method( - self.rollback, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_query: self._wrap_method( - self.partition_query, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.partition_read: self._wrap_method( - self.partition_read, - default_retry=retries.AsyncRetry( - initial=0.25, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.batch_write: self._wrap_method( - self.batch_write, - default_timeout=3600.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'SpannerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py 
b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py deleted file mode 100644 index 206ddb9999..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest.py +++ /dev/null @@ -1,2898 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseSpannerRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class SpannerRestInterceptor: - """Interceptor for Spanner. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the SpannerRestTransport. - - .. 
code-block:: python - class MyCustomSpannerInterceptor(SpannerRestInterceptor): - def pre_batch_create_sessions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_create_sessions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_batch_write(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_write(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_begin_transaction(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_begin_transaction(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_commit(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_commit(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_session(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_session(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_session(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_execute_batch_dml(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_execute_batch_dml(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_execute_sql(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_execute_sql(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_execute_streaming_sql(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_execute_streaming_sql(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_session(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_session(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_sessions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_sessions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_partition_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_partition_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_partition_read(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_partition_read(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_read(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_read(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rollback(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_streaming_read(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_streaming_read(self, 
response): - logging.log(f"Received response: {response}") - return response - - transport = SpannerRestTransport(interceptor=MyCustomSpannerInterceptor()) - client = SpannerClient(transport=transport) - - - """ - def pre_batch_create_sessions(self, request: spanner.BatchCreateSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for batch_create_sessions - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_batch_create_sessions(self, response: spanner.BatchCreateSessionsResponse) -> spanner.BatchCreateSessionsResponse: - """Post-rpc interceptor for batch_create_sessions - - DEPRECATED. Please use the `post_batch_create_sessions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_batch_create_sessions` interceptor runs - before the `post_batch_create_sessions_with_metadata` interceptor. - """ - return response - - def post_batch_create_sessions_with_metadata(self, response: spanner.BatchCreateSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchCreateSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_create_sessions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_batch_create_sessions_with_metadata` - interceptor in new development instead of the `post_batch_create_sessions` interceptor. - When both interceptors are used, this `post_batch_create_sessions_with_metadata` interceptor runs after the - `post_batch_create_sessions` interceptor. The (possibly modified) response returned by - `post_batch_create_sessions` will be passed to - `post_batch_create_sessions_with_metadata`. - """ - return response, metadata - - def pre_batch_write(self, request: spanner.BatchWriteRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BatchWriteRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for batch_write - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_batch_write(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for batch_write - - DEPRECATED. Please use the `post_batch_write_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_batch_write` interceptor runs - before the `post_batch_write_with_metadata` interceptor. - """ - return response - - def post_batch_write_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_write - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. 
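A minimal sketch of wiring in the newer ``*_with_metadata`` hook alongside a pre hook (the extra header name is purely illustrative):

.. code-block:: python

    import logging

    from google.cloud.spanner_v1 import SpannerClient
    from google.cloud.spanner_v1.services.spanner.transports.rest import (
        SpannerRestInterceptor, SpannerRestTransport,
    )

    class MetadataAwareInterceptor(SpannerRestInterceptor):
        def pre_batch_create_sessions(self, request, metadata):
            # Append an extra metadata pair before the request is sent.
            return request, list(metadata) + [("x-example-note", "sketch")]

        def post_batch_create_sessions_with_metadata(self, response, metadata):
            # Runs after post_batch_create_sessions and also sees the
            # decoded response headers.
            logging.debug("created %d sessions", len(response.session))
            return response, metadata

    transport = SpannerRestTransport(interceptor=MetadataAwareInterceptor())
    client = SpannerClient(transport=transport)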
- - We recommend only using this `post_batch_write_with_metadata` - interceptor in new development instead of the `post_batch_write` interceptor. - When both interceptors are used, this `post_batch_write_with_metadata` interceptor runs after the - `post_batch_write` interceptor. The (possibly modified) response returned by - `post_batch_write` will be passed to - `post_batch_write_with_metadata`. - """ - return response, metadata - - def pre_begin_transaction(self, request: spanner.BeginTransactionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.BeginTransactionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for begin_transaction - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_begin_transaction(self, response: transaction.Transaction) -> transaction.Transaction: - """Post-rpc interceptor for begin_transaction - - DEPRECATED. Please use the `post_begin_transaction_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_begin_transaction` interceptor runs - before the `post_begin_transaction_with_metadata` interceptor. - """ - return response - - def post_begin_transaction_with_metadata(self, response: transaction.Transaction, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transaction.Transaction, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for begin_transaction - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_begin_transaction_with_metadata` - interceptor in new development instead of the `post_begin_transaction` interceptor. - When both interceptors are used, this `post_begin_transaction_with_metadata` interceptor runs after the - `post_begin_transaction` interceptor. The (possibly modified) response returned by - `post_begin_transaction` will be passed to - `post_begin_transaction_with_metadata`. - """ - return response, metadata - - def pre_commit(self, request: spanner.CommitRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CommitRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for commit - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_commit(self, response: commit_response.CommitResponse) -> commit_response.CommitResponse: - """Post-rpc interceptor for commit - - DEPRECATED. Please use the `post_commit_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_commit` interceptor runs - before the `post_commit_with_metadata` interceptor. - """ - return response - - def post_commit_with_metadata(self, response: commit_response.CommitResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[commit_response.CommitResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for commit - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. 
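For the server-streaming RPCs (``batch_write``, ``execute_streaming_sql``, ``streaming_read``) the post hooks receive a ``rest_streaming.ResponseIterator`` rather than a single message. A duck-typed sketch that observes messages without exhausting the stream:

.. code-block:: python

    class StreamObservingInterceptor(SpannerRestInterceptor):
        def post_batch_write(self, response):
            # Wrap the iterator instead of draining it; consuming it here
            # would leave user code with an exhausted stream.
            def _observed():
                for msg in response:  # each msg is a spanner.BatchWriteResponse
                    yield msg
            return _observed()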
- - We recommend only using this `post_commit_with_metadata` - interceptor in new development instead of the `post_commit` interceptor. - When both interceptors are used, this `post_commit_with_metadata` interceptor runs after the - `post_commit` interceptor. The (possibly modified) response returned by - `post_commit` will be passed to - `post_commit_with_metadata`. - """ - return response, metadata - - def pre_create_session(self, request: spanner.CreateSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.CreateSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_session - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_create_session(self, response: spanner.Session) -> spanner.Session: - """Post-rpc interceptor for create_session - - DEPRECATED. Please use the `post_create_session_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_create_session` interceptor runs - before the `post_create_session_with_metadata` interceptor. - """ - return response - - def post_create_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_session - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_create_session_with_metadata` - interceptor in new development instead of the `post_create_session` interceptor. - When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the - `post_create_session` interceptor. The (possibly modified) response returned by - `post_create_session` will be passed to - `post_create_session_with_metadata`. - """ - return response, metadata - - def pre_delete_session(self, request: spanner.DeleteSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.DeleteSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_session - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def pre_execute_batch_dml(self, request: spanner.ExecuteBatchDmlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for execute_batch_dml - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_execute_batch_dml(self, response: spanner.ExecuteBatchDmlResponse) -> spanner.ExecuteBatchDmlResponse: - """Post-rpc interceptor for execute_batch_dml - - DEPRECATED. Please use the `post_execute_batch_dml_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_execute_batch_dml` interceptor runs - before the `post_execute_batch_dml_with_metadata` interceptor. 
- """ - return response - - def post_execute_batch_dml_with_metadata(self, response: spanner.ExecuteBatchDmlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteBatchDmlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for execute_batch_dml - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_execute_batch_dml_with_metadata` - interceptor in new development instead of the `post_execute_batch_dml` interceptor. - When both interceptors are used, this `post_execute_batch_dml_with_metadata` interceptor runs after the - `post_execute_batch_dml` interceptor. The (possibly modified) response returned by - `post_execute_batch_dml` will be passed to - `post_execute_batch_dml_with_metadata`. - """ - return response, metadata - - def pre_execute_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for execute_sql - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_execute_sql(self, response: result_set.ResultSet) -> result_set.ResultSet: - """Post-rpc interceptor for execute_sql - - DEPRECATED. Please use the `post_execute_sql_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_execute_sql` interceptor runs - before the `post_execute_sql_with_metadata` interceptor. - """ - return response - - def post_execute_sql_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for execute_sql - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_execute_sql_with_metadata` - interceptor in new development instead of the `post_execute_sql` interceptor. - When both interceptors are used, this `post_execute_sql_with_metadata` interceptor runs after the - `post_execute_sql` interceptor. The (possibly modified) response returned by - `post_execute_sql` will be passed to - `post_execute_sql_with_metadata`. - """ - return response, metadata - - def pre_execute_streaming_sql(self, request: spanner.ExecuteSqlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ExecuteSqlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for execute_streaming_sql - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_execute_streaming_sql(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for execute_streaming_sql - - DEPRECATED. Please use the `post_execute_streaming_sql_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. 
This `post_execute_streaming_sql` interceptor runs - before the `post_execute_streaming_sql_with_metadata` interceptor. - """ - return response - - def post_execute_streaming_sql_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for execute_streaming_sql - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_execute_streaming_sql_with_metadata` - interceptor in new development instead of the `post_execute_streaming_sql` interceptor. - When both interceptors are used, this `post_execute_streaming_sql_with_metadata` interceptor runs after the - `post_execute_streaming_sql` interceptor. The (possibly modified) response returned by - `post_execute_streaming_sql` will be passed to - `post_execute_streaming_sql_with_metadata`. - """ - return response, metadata - - def pre_get_session(self, request: spanner.GetSessionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.GetSessionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_session - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_get_session(self, response: spanner.Session) -> spanner.Session: - """Post-rpc interceptor for get_session - - DEPRECATED. Please use the `post_get_session_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_get_session` interceptor runs - before the `post_get_session_with_metadata` interceptor. - """ - return response - - def post_get_session_with_metadata(self, response: spanner.Session, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.Session, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_session - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_get_session_with_metadata` - interceptor in new development instead of the `post_get_session` interceptor. - When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the - `post_get_session` interceptor. The (possibly modified) response returned by - `post_get_session` will be passed to - `post_get_session_with_metadata`. - """ - return response, metadata - - def pre_list_sessions(self, request: spanner.ListSessionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_sessions - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_list_sessions(self, response: spanner.ListSessionsResponse) -> spanner.ListSessionsResponse: - """Post-rpc interceptor for list_sessions - - DEPRECATED. Please use the `post_list_sessions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. 
This `post_list_sessions` interceptor runs - before the `post_list_sessions_with_metadata` interceptor. - """ - return response - - def post_list_sessions_with_metadata(self, response: spanner.ListSessionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ListSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_sessions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_list_sessions_with_metadata` - interceptor in new development instead of the `post_list_sessions` interceptor. - When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the - `post_list_sessions` interceptor. The (possibly modified) response returned by - `post_list_sessions` will be passed to - `post_list_sessions_with_metadata`. - """ - return response, metadata - - def pre_partition_query(self, request: spanner.PartitionQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for partition_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_partition_query(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: - """Post-rpc interceptor for partition_query - - DEPRECATED. Please use the `post_partition_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_partition_query` interceptor runs - before the `post_partition_query_with_metadata` interceptor. - """ - return response - - def post_partition_query_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for partition_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_partition_query_with_metadata` - interceptor in new development instead of the `post_partition_query` interceptor. - When both interceptors are used, this `post_partition_query_with_metadata` interceptor runs after the - `post_partition_query` interceptor. The (possibly modified) response returned by - `post_partition_query` will be passed to - `post_partition_query_with_metadata`. - """ - return response, metadata - - def pre_partition_read(self, request: spanner.PartitionReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for partition_read - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_partition_read(self, response: spanner.PartitionResponse) -> spanner.PartitionResponse: - """Post-rpc interceptor for partition_read - - DEPRECATED. Please use the `post_partition_read_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_partition_read` interceptor runs - before the `post_partition_read_with_metadata` interceptor. - """ - return response - - def post_partition_read_with_metadata(self, response: spanner.PartitionResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.PartitionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for partition_read - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_partition_read_with_metadata` - interceptor in new development instead of the `post_partition_read` interceptor. - When both interceptors are used, this `post_partition_read_with_metadata` interceptor runs after the - `post_partition_read` interceptor. The (possibly modified) response returned by - `post_partition_read` will be passed to - `post_partition_read_with_metadata`. - """ - return response, metadata - - def pre_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for read - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def post_read(self, response: result_set.ResultSet) -> result_set.ResultSet: - """Post-rpc interceptor for read - - DEPRECATED. Please use the `post_read_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_read` interceptor runs - before the `post_read_with_metadata` interceptor. - """ - return response - - def post_read_with_metadata(self, response: result_set.ResultSet, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[result_set.ResultSet, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for read - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_read_with_metadata` - interceptor in new development instead of the `post_read` interceptor. - When both interceptors are used, this `post_read_with_metadata` interceptor runs after the - `post_read` interceptor. The (possibly modified) response returned by - `post_read` will be passed to - `post_read_with_metadata`. - """ - return response, metadata - - def pre_rollback(self, request: spanner.RollbackRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.RollbackRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rollback - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. - """ - return request, metadata - - def pre_streaming_read(self, request: spanner.ReadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner.ReadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for streaming_read - - Override in a subclass to manipulate the request or metadata - before they are sent to the Spanner server. 
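``delete_session`` and ``rollback`` return ``google.protobuf.Empty``, which is why only pre hooks are generated for them. A sketch of an audit hook for rollbacks:

.. code-block:: python

    import logging

    class RollbackAuditInterceptor(SpannerRestInterceptor):
        def pre_rollback(self, request, metadata):
            # There is no post_rollback: the RPC has an empty response body.
            logging.info("rolling back %r in %s",
                         request.transaction_id, request.session)
            return request, metadata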
- """ - return request, metadata - - def post_streaming_read(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - """Post-rpc interceptor for streaming_read - - DEPRECATED. Please use the `post_streaming_read_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the Spanner server but before - it is returned to user code. This `post_streaming_read` interceptor runs - before the `post_streaming_read_with_metadata` interceptor. - """ - return response - - def post_streaming_read_with_metadata(self, response: rest_streaming.ResponseIterator, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for streaming_read - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the Spanner server but before it is returned to user code. - - We recommend only using this `post_streaming_read_with_metadata` - interceptor in new development instead of the `post_streaming_read` interceptor. - When both interceptors are used, this `post_streaming_read_with_metadata` interceptor runs after the - `post_streaming_read` interceptor. The (possibly modified) response returned by - `post_streaming_read` will be passed to - `post_streaming_read_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class SpannerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: SpannerRestInterceptor - - -class SpannerRestTransport(_BaseSpannerRestTransport): - """REST backend synchronous transport for Spanner. - - Cloud Spanner API - - The Cloud Spanner API can be used to manage sessions and execute - transactions on data stored in Cloud Spanner databases. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[SpannerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or SpannerRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _BatchCreateSessions(_BaseSpannerRestTransport._BaseBatchCreateSessions, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.BatchCreateSessions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.BatchCreateSessionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.BatchCreateSessionsResponse: - r"""Call the batch create sessions method over HTTP. - - Args: - request (~.spanner.BatchCreateSessionsRequest): - The request object. The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.BatchCreateSessionsResponse: - The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. 
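A sketch of this RPC from the client surface (the database path is illustrative, and ``client`` is assumed to be a ``SpannerClient``):

.. code-block:: python

    response = client.batch_create_sessions(
        database="projects/my-project/instances/my-instance/databases/my-db",
        session_count=25,
    )
    print(len(response.session), "sessions created")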
- - """ - - http_options = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_http_options() - - request, metadata = self._interceptor.pre_batch_create_sessions(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseBatchCreateSessions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.BatchCreateSessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchCreateSessions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._BatchCreateSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.BatchCreateSessionsResponse() - pb_resp = spanner.BatchCreateSessionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_batch_create_sessions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_create_sessions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.BatchCreateSessionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.batch_create_sessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchCreateSessions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BatchWrite(_BaseSpannerRestTransport._BaseBatchWrite, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.BatchWrite") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__(self, - request: spanner.BatchWriteRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the batch write method over HTTP. - - Args: - request (~.spanner.BatchWriteRequest): - The request object. The request for - [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.BatchWriteResponse: - The result of applying a batch of - mutations. - - """ - - http_options = _BaseSpannerRestTransport._BaseBatchWrite._get_http_options() - - request, metadata = self._interceptor.pre_batch_write(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseBatchWrite._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseBatchWrite._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseBatchWrite._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.BatchWrite", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchWrite", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._BatchWrite._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, spanner.BatchWriteResponse) - - resp = self._interceptor.post_batch_write(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_write_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.batch_write", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BatchWrite", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BeginTransaction(_BaseSpannerRestTransport._BaseBeginTransaction, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.BeginTransaction") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.BeginTransactionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> transaction.Transaction: - r"""Call the begin transaction method over HTTP. - - Args: - request (~.spanner.BeginTransactionRequest): - The request object. The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.transaction.Transaction: - A transaction. 
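The ``_BatchWrite`` call above hands back a ``ResponseIterator``; a sketch of draining it and checking per-group statuses (``session_name`` and ``mutation_groups`` are assumed to exist):

.. code-block:: python

    for resp in client.batch_write(
        session=session_name,             # an existing session resource name
        mutation_groups=mutation_groups,  # Sequence of BatchWriteRequest.MutationGroup
    ):
        if resp.status.code != 0:  # google.rpc.Code.OK == 0
            print("groups", list(resp.indexes), "failed:", resp.status.message)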
- """ - - http_options = _BaseSpannerRestTransport._BaseBeginTransaction._get_http_options() - - request, metadata = self._interceptor.pre_begin_transaction(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseBeginTransaction._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseBeginTransaction._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseBeginTransaction._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.BeginTransaction", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BeginTransaction", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._BeginTransaction._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = transaction.Transaction() - pb_resp = transaction.Transaction.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_begin_transaction(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_begin_transaction_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = transaction.Transaction.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.begin_transaction", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "BeginTransaction", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Commit(_BaseSpannerRestTransport._BaseCommit, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.Commit") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.CommitRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> commit_response.CommitResponse: - r"""Call the commit method over HTTP. 
- - Args: - request (~.spanner.CommitRequest): - The request object. The request for - [Commit][google.spanner.v1.Spanner.Commit]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.commit_response.CommitResponse: - The response for - [Commit][google.spanner.v1.Spanner.Commit]. - - """ - - http_options = _BaseSpannerRestTransport._BaseCommit._get_http_options() - - request, metadata = self._interceptor.pre_commit(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseCommit._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseCommit._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseCommit._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.Commit", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Commit", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._Commit._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = commit_response.CommitResponse() - pb_resp = commit_response.CommitResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_commit(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_commit_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = commit_response.CommitResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.commit", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Commit", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateSession(_BaseSpannerRestTransport._BaseCreateSession, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.CreateSession") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.CreateSessionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.Session: - r"""Call the create session method over HTTP. - - Args: - request (~.spanner.CreateSessionRequest): - The request object. The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.Session: - A session in the Cloud Spanner API. 
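Putting the session and transaction RPCs together, a sketch of an explicit begin/commit round trip (the database path is illustrative):

.. code-block:: python

    from google.cloud.spanner_v1.types import TransactionOptions

    session = client.create_session(
        database="projects/my-project/instances/my-instance/databases/my-db",
    )
    txn = client.begin_transaction(
        session=session.name,
        options=TransactionOptions(read_write=TransactionOptions.ReadWrite()),
    )
    commit = client.commit(
        session=session.name,
        transaction_id=txn.id,
        mutations=[],  # a no-op commit
    )
    print("committed at", commit.commit_timestamp)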
- """ - - http_options = _BaseSpannerRestTransport._BaseCreateSession._get_http_options() - - request, metadata = self._interceptor.pre_create_session(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseCreateSession._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseCreateSession._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseCreateSession._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.CreateSession", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "CreateSession", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._CreateSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.Session() - pb_resp = spanner.Session.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_session(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_session_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.Session.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.create_session", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "CreateSession", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteSession(_BaseSpannerRestTransport._BaseDeleteSession, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.DeleteSession") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner.DeleteSessionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete session method over HTTP. - - Args: - request (~.spanner.DeleteSessionRequest): - The request object. 
The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseSpannerRestTransport._BaseDeleteSession._get_http_options() - - request, metadata = self._interceptor.pre_delete_session(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseDeleteSession._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseDeleteSession._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.DeleteSession", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "DeleteSession", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._DeleteSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _ExecuteBatchDml(_BaseSpannerRestTransport._BaseExecuteBatchDml, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ExecuteBatchDml") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.ExecuteBatchDmlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.ExecuteBatchDmlResponse: - r"""Call the execute batch dml method over HTTP. - - Args: - request (~.spanner.ExecuteBatchDmlRequest): - The request object. The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.spanner.ExecuteBatchDmlResponse: - The response for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - Contains a list of - [ResultSet][google.spanner.v1.ResultSet] messages, one - for each DML statement that has successfully executed, - in the same order as the statements in the request. If a - statement fails, the status in the response body - identifies the cause of the failure. - - To check for DML statements that failed, use the - following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value ``OK`` - indicates that all statements were executed - successfully. - 2. If the status was not ``OK``, check the number of - result sets in the response. If the response contains - ``N`` [ResultSet][google.spanner.v1.ResultSet] - messages, then statement ``N+1`` in the request - failed. - - Example 1: - - - Request: 5 DML statements, all executed successfully. - - Response: 5 [ResultSet][google.spanner.v1.ResultSet] - messages, with the status ``OK``. - - Example 2: - - - Request: 5 DML statements. The third statement has a - syntax error. - - Response: 2 [ResultSet][google.spanner.v1.ResultSet] - messages, and a syntax error (``INVALID_ARGUMENT``) - status. The number of - [ResultSet][google.spanner.v1.ResultSet] messages - indicates that the third statement failed, and the - fourth and fifth statements were not executed. - - """ - - http_options = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_http_options() - - request, metadata = self._interceptor.pre_execute_batch_dml(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseExecuteBatchDml._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ExecuteBatchDml", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteBatchDml", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ExecuteBatchDml._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
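A sketch of the failure check spelled out in the docstring above: with ``N`` result sets in the response and a non-``OK`` status, the statement at zero-based index ``N`` is the one that failed (``batch_dml_request`` is assumed to exist):

.. code-block:: python

    response = client.execute_batch_dml(request=batch_dml_request)
    if response.status.code != 0:  # google.rpc.Code.OK == 0
        failed = len(response.result_sets)  # zero-based index of the failed statement
        print("statement", failed, "failed:", response.status.message)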
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.ExecuteBatchDmlResponse() - pb_resp = spanner.ExecuteBatchDmlResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_execute_batch_dml(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_execute_batch_dml_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.ExecuteBatchDmlResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.execute_batch_dml", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteBatchDml", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExecuteSql(_BaseSpannerRestTransport._BaseExecuteSql, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ExecuteSql") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.ExecuteSqlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> result_set.ResultSet: - r"""Call the execute sql method over HTTP. - - Args: - request (~.spanner.ExecuteSqlRequest): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
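The partial-failure contract spelled out in the ExecuteBatchDml docstring above is worth exercising from the caller's side: statements run in order, and the count of returned ResultSet messages identifies the first statement that failed. A hedged usage sketch (the session name is a placeholder; in practice you would create a session first):

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient()  # transport="rest" works the same way

    request = spanner_v1.ExecuteBatchDmlRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
        transaction=spanner_v1.TransactionSelector(
            begin=spanner_v1.TransactionOptions(
                read_write=spanner_v1.TransactionOptions.ReadWrite(),
            ),
        ),
        statements=[
            spanner_v1.ExecuteBatchDmlRequest.Statement(sql="UPDATE t SET x = 1 WHERE id = 1"),
            spanner_v1.ExecuteBatchDmlRequest.Statement(sql="UPDATE t SET x = 2 WHERE id = 2"),
        ],
        seqno=1,
    )
    response = client.execute_batch_dml(request=request)

    if response.status.code == 0:  # google.rpc.Code.OK
        print("all statements executed")
    else:
        # With N result sets returned, the statement at 0-based index N
        # failed and the remaining statements were not attempted.
        print(f"statement {len(response.result_sets)} failed: {response.status.message}")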
- - """ - - http_options = _BaseSpannerRestTransport._BaseExecuteSql._get_http_options() - - request, metadata = self._interceptor.pre_execute_sql(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseExecuteSql._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseExecuteSql._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseExecuteSql._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ExecuteSql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteSql", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ExecuteSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = result_set.ResultSet() - pb_resp = result_set.ResultSet.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_execute_sql(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_execute_sql_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = result_set.ResultSet.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.execute_sql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteSql", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExecuteStreamingSql(_BaseSpannerRestTransport._BaseExecuteStreamingSql, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ExecuteStreamingSql") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__(self, - request: spanner.ExecuteSqlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the execute streaming sql method over HTTP. 
- - Args: - request (~.spanner.ExecuteSqlRequest): - The request object. The request for - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.PartialResultSet: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - - http_options = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_http_options() - - request, metadata = self._interceptor.pre_execute_streaming_sql(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseExecuteStreamingSql._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ExecuteStreamingSql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteStreamingSql", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ExecuteStreamingSql._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) - - resp = self._interceptor.post_execute_streaming_sql(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_execute_streaming_sql_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.execute_streaming_sql", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ExecuteStreamingSql", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetSession(_BaseSpannerRestTransport._BaseGetSession, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.GetSession") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner.GetSessionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.Session: - r"""Call the get session method over HTTP. - - Args: - request (~.spanner.GetSessionRequest): - The request object. The request for - [GetSession][google.spanner.v1.Spanner.GetSession]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.Session: - A session in the Cloud Spanner API. 
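ExecuteStreamingSql above (together with StreamingRead below) is the one place where the transport passes stream=True to the underlying session and hands back a rest_streaming.ResponseIterator instead of parsing a single message. From the caller's side the difference is simply an iterable of PartialResultSet chunks; a minimal sketch, assuming Application Default Credentials and a placeholder session name:

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient(transport="rest")

    request = spanner_v1.ExecuteSqlRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # placeholder
        sql="SELECT id, name FROM users",
    )

    # Over REST this iterable is backed by the ResponseIterator built above,
    # which incrementally parses the streamed JSON array of PartialResultSets.
    for partial in client.execute_streaming_sql(request=request):
        # chunked_value means the last value continues in the next chunk.
        print(list(partial.values), partial.chunked_value)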
- """ - - http_options = _BaseSpannerRestTransport._BaseGetSession._get_http_options() - - request, metadata = self._interceptor.pre_get_session(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseGetSession._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseGetSession._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.GetSession", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "GetSession", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._GetSession._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.Session() - pb_resp = spanner.Session.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_session(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_session_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.Session.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.get_session", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "GetSession", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListSessions(_BaseSpannerRestTransport._BaseListSessions, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.ListSessions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner.ListSessionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.ListSessionsResponse: - r"""Call the list sessions method over HTTP. - - Args: - request (~.spanner.ListSessionsRequest): - The request object. The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.ListSessionsResponse: - The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - """ - - http_options = _BaseSpannerRestTransport._BaseListSessions._get_http_options() - - request, metadata = self._interceptor.pre_list_sessions(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseListSessions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseListSessions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.ListSessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ListSessions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._ListSessions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.ListSessionsResponse() - pb_resp = spanner.ListSessionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_sessions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_sessions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.ListSessionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.list_sessions", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "ListSessions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _PartitionQuery(_BaseSpannerRestTransport._BasePartitionQuery, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.PartitionQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.PartitionQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.PartitionResponse: - r"""Call the partition query method over HTTP. - - Args: - request (~.spanner.PartitionQueryRequest): - The request object. The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
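The deserialization dance repeated in each unary stub — build an empty proto-plus message, grab its underlying protobuf with .pb(), then json_format.Parse into that — exists because json_format only operates on real protobuf messages, while .pb(resp) returns the very instance wrapped by resp, so the parsed fields become visible through the wrapper. A self-contained sketch with a hand-written JSON payload:

    from google.protobuf import json_format
    from google.cloud.spanner_v1.types import spanner

    payload = '{"sessions": [{"name": "projects/p/instances/i/databases/d/sessions/s"}]}'

    resp = spanner.ListSessionsResponse()
    pb_resp = spanner.ListSessionsResponse.pb(resp)  # the wrapped protobuf message
    json_format.Parse(payload, pb_resp, ignore_unknown_fields=True)

    print(resp.sessions[0].name)  # parsed data is visible through the wrapper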
- - Returns: - ~.spanner.PartitionResponse: - The response for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - - http_options = _BaseSpannerRestTransport._BasePartitionQuery._get_http_options() - - request, metadata = self._interceptor.pre_partition_query(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BasePartitionQuery._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BasePartitionQuery._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BasePartitionQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.PartitionQuery", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._PartitionQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.PartitionResponse() - pb_resp = spanner.PartitionResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_partition_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_partition_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.PartitionResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.partition_query", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _PartitionRead(_BaseSpannerRestTransport._BasePartitionRead, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.PartitionRead") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.PartitionReadRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner.PartitionResponse: - r"""Call the partition read method over HTTP. - - Args: - request (~.spanner.PartitionReadRequest): - The request object. The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner.PartitionResponse: - The response for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - """ - - http_options = _BaseSpannerRestTransport._BasePartitionRead._get_http_options() - - request, metadata = self._interceptor.pre_partition_read(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BasePartitionRead._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BasePartitionRead._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BasePartitionRead._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.PartitionRead", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionRead", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._PartitionRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner.PartitionResponse() - pb_resp = spanner.PartitionResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_partition_read(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_partition_read_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner.PartitionResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.partition_read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "PartitionRead", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Read(_BaseSpannerRestTransport._BaseRead, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.Read") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.ReadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> result_set.ResultSet: - r"""Call the read method over HTTP. - - Args: - request (~.spanner.ReadRequest): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.ResultSet: - Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. 
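Each stub also brackets its work with interceptor hooks: pre_* may rewrite the request and metadata before transcoding, and post_* (plus post_*_with_metadata) may rewrite the parsed response. A hedged sketch of plugging in custom behaviour, assuming the SpannerRestInterceptor base class generated earlier in this module and Application Default Credentials:

    import logging

    from google.cloud import spanner_v1
    from google.cloud.spanner_v1.services.spanner.transports.rest import (
        SpannerRestInterceptor,
        SpannerRestTransport,
    )

    class AuditInterceptor(SpannerRestInterceptor):
        def pre_read(self, request, metadata):
            logging.info("Read on table %s", request.table)
            return request, metadata  # must return the (possibly modified) pair

        def post_read(self, response):
            logging.info("Read returned %d rows", len(response.rows))
            return response

    transport = SpannerRestTransport(interceptor=AuditInterceptor())
    client = spanner_v1.SpannerClient(transport=transport)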
- - """ - - http_options = _BaseSpannerRestTransport._BaseRead._get_http_options() - - request, metadata = self._interceptor.pre_read(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseRead._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseRead._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseRead._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.Read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Read", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._Read._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = result_set.ResultSet() - pb_resp = result_set.ResultSet.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_read(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_read_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = result_set.ResultSet.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Read", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Rollback(_BaseSpannerRestTransport._BaseRollback, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.Rollback") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner.RollbackRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the rollback method over HTTP. - - Args: - request (~.spanner.RollbackRequest): - The request object. The request for - [Rollback][google.spanner.v1.Spanner.Rollback]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseSpannerRestTransport._BaseRollback._get_http_options() - - request, metadata = self._interceptor.pre_rollback(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseRollback._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseRollback._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseRollback._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.Rollback", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "Rollback", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._Rollback._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _StreamingRead(_BaseSpannerRestTransport._BaseStreamingRead, SpannerRestStub): - def __hash__(self): - return hash("SpannerRestTransport.StreamingRead") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - stream=True, - ) - return response - - def __call__(self, - request: spanner.ReadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> rest_streaming.ResponseIterator: - r"""Call the streaming read method over HTTP. - - Args: - request (~.spanner.ReadRequest): - The request object. The request for [Read][google.spanner.v1.Spanner.Read] - and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
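One detail of the DEBUG-logging blocks repeated in every stub: payload serialization is wrapped in a bare except so that a logging failure can never break the RPC itself. A slightly narrower variant of the same guard, shown as a sketch rather than as the generator's output, still swallows serialization errors but lets KeyboardInterrupt and SystemExit propagate:

    import logging

    from google.protobuf import json_format

    _LOGGER = logging.getLogger(__name__)

    def _safe_request_payload(request):
        # Best-effort serialization for DEBUG logging only.
        try:
            return json_format.MessageToJson(request)
        except Exception:
            _LOGGER.debug("could not serialize request payload", exc_info=True)
            return None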
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.result_set.PartialResultSet: - Partial results from a streaming read - or SQL query. Streaming reads and SQL - queries better tolerate large result - sets, large rows, and large values, but - are a little trickier to consume. - - """ - - http_options = _BaseSpannerRestTransport._BaseStreamingRead._get_http_options() - - request, metadata = self._interceptor.pre_streaming_read(request, metadata) - transcoded_request = _BaseSpannerRestTransport._BaseStreamingRead._get_transcoded_request(http_options, request) - - body = _BaseSpannerRestTransport._BaseStreamingRead._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseSpannerRestTransport._BaseStreamingRead._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner_v1.SpannerClient.StreamingRead", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "StreamingRead", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = SpannerRestTransport._StreamingRead._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = rest_streaming.ResponseIterator(response, result_set.PartialResultSet) - - resp = self._interceptor.post_streaming_read(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_streaming_read_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - http_response = { - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner_v1.SpannerClient.streaming_read", - extra = { - "serviceName": "google.spanner.v1.Spanner", - "rpcName": "StreamingRead", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def batch_create_sessions(self) -> Callable[ - [spanner.BatchCreateSessionsRequest], - spanner.BatchCreateSessionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchCreateSessions(self._session, self._host, self._interceptor) # type: ignore - - @property - def batch_write(self) -> Callable[ - [spanner.BatchWriteRequest], - spanner.BatchWriteResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._BatchWrite(self._session, self._host, self._interceptor) # type: ignore - - @property - def begin_transaction(self) -> Callable[ - [spanner.BeginTransactionRequest], - transaction.Transaction]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BeginTransaction(self._session, self._host, self._interceptor) # type: ignore - - @property - def commit(self) -> Callable[ - [spanner.CommitRequest], - commit_response.CommitResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Commit(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_session(self) -> Callable[ - [spanner.CreateSessionRequest], - spanner.Session]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateSession(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_session(self) -> Callable[ - [spanner.DeleteSessionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteSession(self._session, self._host, self._interceptor) # type: ignore - - @property - def execute_batch_dml(self) -> Callable[ - [spanner.ExecuteBatchDmlRequest], - spanner.ExecuteBatchDmlResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExecuteBatchDml(self._session, self._host, self._interceptor) # type: ignore - - @property - def execute_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.ResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExecuteSql(self._session, self._host, self._interceptor) # type: ignore - - @property - def execute_streaming_sql(self) -> Callable[ - [spanner.ExecuteSqlRequest], - result_set.PartialResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExecuteStreamingSql(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_session(self) -> Callable[ - [spanner.GetSessionRequest], - spanner.Session]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetSession(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_sessions(self) -> Callable[ - [spanner.ListSessionsRequest], - spanner.ListSessionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSessions(self._session, self._host, self._interceptor) # type: ignore - - @property - def partition_query(self) -> Callable[ - [spanner.PartitionQueryRequest], - spanner.PartitionResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._PartitionQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def partition_read(self) -> Callable[ - [spanner.PartitionReadRequest], - spanner.PartitionResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._PartitionRead(self._session, self._host, self._interceptor) # type: ignore - - @property - def read(self) -> Callable[ - [spanner.ReadRequest], - result_set.ResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Read(self._session, self._host, self._interceptor) # type: ignore - - @property - def rollback(self) -> Callable[ - [spanner.RollbackRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Rollback(self._session, self._host, self._interceptor) # type: ignore - - @property - def streaming_read(self) -> Callable[ - [spanner.ReadRequest], - result_set.PartialResultSet]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._StreamingRead(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'SpannerRestTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py deleted file mode 100644 index 79eb05d0cf..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/services/spanner/transports/rest_base.py +++ /dev/null @@ -1,817 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import SpannerTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseSpannerRestTransport(SpannerTransport): - """Base REST backend transport for Spanner. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
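The properties that closed out rest.py hand the nested stub instances back as plain callables (the # type: ignore silences mypy, which cannot prove the instances match the annotated Callable shape), so the client layer stays transport-agnostic. A hedged sketch of that wiring from the consumer's side, assuming Application Default Credentials and a placeholder session name:

    from google.cloud import spanner_v1

    client = spanner_v1.SpannerClient(transport="rest")
    print(client.transport.kind)  # "rest"

    # client.get_session(...) ultimately resolves to this property, which
    # returns the _GetSession stub; calling it performs the HTTP round trip.
    get_session = client.transport.get_session
    session = get_session(spanner_v1.GetSessionRequest(
        name="projects/p/instances/i/databases/d/sessions/s",  # placeholder
    ))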
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseBatchCreateSessions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.BatchCreateSessionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseBatchCreateSessions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBatchWrite: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.BatchWriteRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseBatchWrite._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBeginTransaction: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.BeginTransactionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseBeginTransaction._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCommit: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.CommitRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - 
use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseCommit._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateSession: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.CreateSessionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseCreateSession._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteSession: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.DeleteSessionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseDeleteSession._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExecuteBatchDml: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeBatchDml', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ExecuteBatchDmlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseExecuteBatchDml._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExecuteSql: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeSql', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ExecuteSqlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseExecuteSql._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExecuteStreamingSql: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeStreamingSql', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ExecuteSqlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseExecuteStreamingSql._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetSession: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/sessions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.GetSessionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseGetSession._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListSessions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/sessions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ListSessionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseListSessions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BasePartitionQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionQuery', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.PartitionQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = 
json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BasePartitionQuery._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BasePartitionRead: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:partitionRead', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.PartitionReadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BasePartitionRead._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRead: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:read', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ReadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseRead._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRollback: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - 
return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.RollbackRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseRollback._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseStreamingRead: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:streamingRead', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner.ReadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseSpannerRestTransport._BaseStreamingRead._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseSpannerRestTransport', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py deleted file mode 100644 index d4c6938ee1..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/__init__.py +++ /dev/null @@ -1,122 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
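Every `_Base*` helper above follows the same four-step recipe: select an HTTP rule, transcode the request onto the rule's URI template, JSON-encode the body, and fold the unset required fields plus ``$alt=json;enum-encoding=int`` into the query string. For orientation, a self-contained sketch of that flow; the simplified ``transcode`` here is a stand-in for ``google.api_core.path_template.transcode``, not its real implementation:

import json
import re

def transcode(uri_template, request):
    """Simplified stand-in for path_template.transcode: substitute request
    fields into a template such as
    '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeSql'."""
    request = dict(request)
    uri = re.sub(
        r'\{(\w+)(?:=[^}]*)?\}',
        lambda m: str(request.pop(m.group(1))),
        uri_template,
    )
    # Whatever was not consumed by the URI or the body becomes query params.
    return {'uri': uri, 'body': request.pop('body', None), 'query_params': request}

transcoded = transcode(
    '/v1/{session=projects/*/instances/*/databases/*/sessions/*}:executeSql',
    {'session': 'projects/p/instances/i/databases/d/sessions/s',
     'body': {'sql': 'SELECT 1'}},
)
query_params = dict(transcoded['query_params'])
query_params['$alt'] = 'json;enum-encoding=int'  # mirrors _get_query_params_json above
print(transcoded['uri'])                          # .../sessions/s:executeSql
print(json.dumps(transcoded['body']), query_params)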
-# -from .change_stream import ( - ChangeStreamRecord, -) -from .commit_response import ( - CommitResponse, -) -from .keys import ( - KeyRange, - KeySet, -) -from .mutation import ( - Mutation, -) -from .query_plan import ( - PlanNode, - QueryPlan, -) -from .result_set import ( - PartialResultSet, - ResultSet, - ResultSetMetadata, - ResultSetStats, -) -from .spanner import ( - BatchCreateSessionsRequest, - BatchCreateSessionsResponse, - BatchWriteRequest, - BatchWriteResponse, - BeginTransactionRequest, - CommitRequest, - CreateSessionRequest, - DeleteSessionRequest, - DirectedReadOptions, - ExecuteBatchDmlRequest, - ExecuteBatchDmlResponse, - ExecuteSqlRequest, - GetSessionRequest, - ListSessionsRequest, - ListSessionsResponse, - Partition, - PartitionOptions, - PartitionQueryRequest, - PartitionReadRequest, - PartitionResponse, - ReadRequest, - RequestOptions, - RollbackRequest, - Session, -) -from .transaction import ( - MultiplexedSessionPrecommitToken, - Transaction, - TransactionOptions, - TransactionSelector, -) -from .type import ( - StructType, - Type, - TypeAnnotationCode, - TypeCode, -) - -__all__ = ( - 'ChangeStreamRecord', - 'CommitResponse', - 'KeyRange', - 'KeySet', - 'Mutation', - 'PlanNode', - 'QueryPlan', - 'PartialResultSet', - 'ResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 'BatchWriteRequest', - 'BatchWriteResponse', - 'BeginTransactionRequest', - 'CommitRequest', - 'CreateSessionRequest', - 'DeleteSessionRequest', - 'DirectedReadOptions', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'ExecuteSqlRequest', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'Partition', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'PartitionResponse', - 'ReadRequest', - 'RequestOptions', - 'RollbackRequest', - 'Session', - 'MultiplexedSessionPrecommitToken', - 'Transaction', - 'TransactionOptions', - 'TransactionSelector', - 'StructType', - 'Type', - 'TypeAnnotationCode', - 'TypeCode', -) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py deleted file mode 100644 index 8db7f85bb5..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/change_stream.py +++ /dev/null @@ -1,683 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
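The ``types/__init__.py`` module removed above carries no logic of its own; it flattens the per-file proto-plus modules into a single import surface. A small usage sketch, assuming the published ``google-cloud-spanner`` package layout:

# All message and enum classes resolve through the one re-export module.
from google.cloud.spanner_v1.types import KeySet, Mutation, TypeCode

keyset = KeySet(all_=True)        # matches every key in a table or index
print(TypeCode.INT64)             # enums are re-exported next to messages
print(type(keyset).__module__)    # the class still lives in the keys module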
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'ChangeStreamRecord', - }, -) - - -class ChangeStreamRecord(proto.Message): - r"""Spanner Change Streams enable customers to capture and stream out - changes to their Spanner databases in real-time. A change stream can - be created with option partition_mode='IMMUTABLE_KEY_RANGE' or - partition_mode='MUTABLE_KEY_RANGE'. - - This message is only used in Change Streams created with the option - partition_mode='MUTABLE_KEY_RANGE'. Spanner automatically creates a - special Table-Valued Function (TVF) along with each Change Streams. - The function provides access to the change stream's records. The - function is named READ\_<change_stream_name> (where - <change_stream_name> is the name of the change stream), and it - returns a table with only one column called ChangeRecord. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - data_change_record (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord): - Data change record describing a data change - for a change stream partition. - - This field is a member of `oneof`_ ``record``. - heartbeat_record (google.cloud.spanner_v1.types.ChangeStreamRecord.HeartbeatRecord): - Heartbeat record describing a heartbeat for a - change stream partition. - - This field is a member of `oneof`_ ``record``. - partition_start_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionStartRecord): - Partition start record describing a new - change stream partition. - - This field is a member of `oneof`_ ``record``. - partition_end_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEndRecord): - Partition end record describing a terminated - change stream partition. - - This field is a member of `oneof`_ ``record``. - partition_event_record (google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord): - Partition event record describing key range - changes for a change stream partition. - - This field is a member of `oneof`_ ``record``. - """ - - class DataChangeRecord(proto.Message): - r"""A data change record contains a set of changes to a table - with the same modification type (insert, update, or delete) - committed at the same commit timestamp in one change stream - partition for the same transaction. Multiple data change records - can be returned for the same transaction across multiple change - stream partitions. - - Attributes: - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Indicates the timestamp in which the change was committed. - DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. - record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition.
To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - - The record sequence number ordering across partitions is - only meaningful in the context of a specific transaction. - Record sequence numbers are unique across partitions for a - specific transaction. Sort the DataChangeRecords for the - same - [server_transaction_id][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.server_transaction_id] - by - [record_sequence][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.record_sequence] - to reconstruct the ordering of the changes within the - transaction. - server_transaction_id (str): - Provides a globally unique string that represents the - transaction in which the change was committed. Multiple - transactions can have the same commit timestamp, but each - transaction has a unique server_transaction_id. - is_last_record_in_transaction_in_partition (bool): - Indicates whether this is the last record for - a transaction in the current partition. Clients - can use this field to determine when all - records for a transaction in the current - partition have been received. - table (str): - Name of the table affected by the change. - column_metadata (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ColumnMetadata]): - Provides metadata describing the columns associated with the - [mods][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.mods] - listed below. - mods (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.Mod]): - Describes the changes that were made. - mod_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModType): - Describes the type of change. - value_capture_type (google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ValueCaptureType): - Describes the value capture type that was - specified in the change stream configuration - when this change was captured. - number_of_records_in_transaction (int): - Indicates the number of data change records - that are part of this transaction across all - change stream partitions. This value can be used - to assemble all the records associated with a - particular transaction. - number_of_partitions_in_transaction (int): - Indicates the number of partitions that - return data change records for this transaction. - This value can be helpful in assembling all - records associated with a particular - transaction. - transaction_tag (str): - Indicates the transaction tag associated with - this transaction. - is_system_transaction (bool): - Indicates whether the transaction is a system - transaction. System transactions include those - issued by time-to-live (TTL), column backfill, - etc. - """ - class ModType(proto.Enum): - r"""Mod type describes the type of change Spanner applied to the data. - For example, if the client submits an INSERT_OR_UPDATE request, - Spanner will perform an insert if there is no existing row and - return ModType INSERT. Alternatively, if there is an existing row, - Spanner will perform an update and return ModType UPDATE. - - Values: - MOD_TYPE_UNSPECIFIED (0): - Not specified. - INSERT (10): - Indicates data was inserted. - UPDATE (20): - Indicates existing data was updated. - DELETE (30): - Indicates existing data was deleted. 
- """ - MOD_TYPE_UNSPECIFIED = 0 - INSERT = 10 - UPDATE = 20 - DELETE = 30 - - class ValueCaptureType(proto.Enum): - r"""Value capture type describes which values are recorded in the - data change record. - - Values: - VALUE_CAPTURE_TYPE_UNSPECIFIED (0): - Not specified. - OLD_AND_NEW_VALUES (10): - Records both old and new values of the - modified watched columns. - NEW_VALUES (20): - Records only new values of the modified - watched columns. - NEW_ROW (30): - Records new values of all watched columns, - including modified and unmodified columns. - NEW_ROW_AND_OLD_VALUES (40): - Records the new values of all watched - columns, including modified and unmodified - columns. Also records the old values of the - modified columns. - """ - VALUE_CAPTURE_TYPE_UNSPECIFIED = 0 - OLD_AND_NEW_VALUES = 10 - NEW_VALUES = 20 - NEW_ROW = 30 - NEW_ROW_AND_OLD_VALUES = 40 - - class ColumnMetadata(proto.Message): - r"""Metadata for a column. - - Attributes: - name (str): - Name of the column. - type_ (google.cloud.spanner_v1.types.Type): - Type of the column. - is_primary_key (bool): - Indicates whether the column is a primary key - column. - ordinal_position (int): - Ordinal position of the column based on the - original table definition in the schema starting - with a value of 1. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: gs_type.Type = proto.Field( - proto.MESSAGE, - number=2, - message=gs_type.Type, - ) - is_primary_key: bool = proto.Field( - proto.BOOL, - number=3, - ) - ordinal_position: int = proto.Field( - proto.INT64, - number=4, - ) - - class ModValue(proto.Message): - r"""Returns the value and associated metadata for a particular field of - the - [Mod][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.Mod]. - - Attributes: - column_metadata_index (int): - Index within the repeated - [column_metadata][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.column_metadata] - field, to obtain the column metadata for the column that was - modified. - value (google.protobuf.struct_pb2.Value): - The value of the column. - """ - - column_metadata_index: int = proto.Field( - proto.INT32, - number=1, - ) - value: struct_pb2.Value = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Value, - ) - - class Mod(proto.Message): - r"""A mod describes all data changes in a watched table row. - - Attributes: - keys (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): - Returns the value of the primary key of the - modified row. - old_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): - Returns the old values before the change for the modified - columns. Always empty for - [INSERT][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.INSERT], - or if old values are not being captured specified by - [value_capture_type][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ValueCaptureType]. - new_values (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.DataChangeRecord.ModValue]): - Returns the new values after the change for the modified - columns. Always empty for - [DELETE][google.spanner.v1.ChangeStreamRecord.DataChangeRecord.ModType.DELETE]. 
- """ - - keys: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ChangeStreamRecord.DataChangeRecord.ModValue', - ) - old_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ChangeStreamRecord.DataChangeRecord.ModValue', - ) - new_values: MutableSequence['ChangeStreamRecord.DataChangeRecord.ModValue'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ChangeStreamRecord.DataChangeRecord.ModValue', - ) - - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - server_transaction_id: str = proto.Field( - proto.STRING, - number=3, - ) - is_last_record_in_transaction_in_partition: bool = proto.Field( - proto.BOOL, - number=4, - ) - table: str = proto.Field( - proto.STRING, - number=5, - ) - column_metadata: MutableSequence['ChangeStreamRecord.DataChangeRecord.ColumnMetadata'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='ChangeStreamRecord.DataChangeRecord.ColumnMetadata', - ) - mods: MutableSequence['ChangeStreamRecord.DataChangeRecord.Mod'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ChangeStreamRecord.DataChangeRecord.Mod', - ) - mod_type: 'ChangeStreamRecord.DataChangeRecord.ModType' = proto.Field( - proto.ENUM, - number=8, - enum='ChangeStreamRecord.DataChangeRecord.ModType', - ) - value_capture_type: 'ChangeStreamRecord.DataChangeRecord.ValueCaptureType' = proto.Field( - proto.ENUM, - number=9, - enum='ChangeStreamRecord.DataChangeRecord.ValueCaptureType', - ) - number_of_records_in_transaction: int = proto.Field( - proto.INT32, - number=10, - ) - number_of_partitions_in_transaction: int = proto.Field( - proto.INT32, - number=11, - ) - transaction_tag: str = proto.Field( - proto.STRING, - number=12, - ) - is_system_transaction: bool = proto.Field( - proto.BOOL, - number=13, - ) - - class HeartbeatRecord(proto.Message): - r"""A heartbeat record is returned as a progress indicator, when - there are no data changes or any other partition record types in - the change stream partition. - - Attributes: - timestamp (google.protobuf.timestamp_pb2.Timestamp): - Indicates the timestamp at which the query - has returned all the records in the change - stream partition with timestamp <= heartbeat - timestamp. The heartbeat timestamp will not be - the same as the timestamps of other record types - in the same partition. - """ - - timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - - class PartitionStartRecord(proto.Message): - r"""A partition start record serves as a notification that the - client should schedule the partitions to be queried. - PartitionStartRecord returns information about one or more - partitions. - - Attributes: - start_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Start timestamp at which the partitions should be queried to - return change stream records with timestamps >= - start_timestamp. DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. 
- record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition. To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - partition_tokens (MutableSequence[str]): - Unique partition identifiers to be used in - queries. - """ - - start_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - partition_tokens: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - class PartitionEndRecord(proto.Message): - r"""A partition end record serves as a notification that the - client should stop reading the partition. No further records are - expected to be retrieved on it. - - Attributes: - end_timestamp (google.protobuf.timestamp_pb2.Timestamp): - End timestamp at which the change stream partition is - terminated. All changes generated by this partition will - have timestamps <= end_timestamp. - DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. PartitionEndRecord is the last record - returned for a partition. - record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition. To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - partition_token (str): - Unique partition identifier describing the terminated change - stream partition. - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEndRecord.partition_token] - is equal to the partition token of the change stream - partition currently queried to return this - PartitionEndRecord. - """ - - end_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - partition_token: str = proto.Field( - proto.STRING, - number=3, - ) - - class PartitionEventRecord(proto.Message): - r"""A partition event record describes key range changes for a change - stream partition. The changes to a row defined by its primary key - can be captured in one change stream partition for a specific time - range, and then be captured in a different change stream partition - for a different time range. This movement of key ranges across - change stream partitions is a reflection of activities, such as - Spanner's dynamic splitting and load balancing, etc. Processing this - event is needed if users want to guarantee processing of the changes - for any key in timestamp order. If time ordered processing of - changes for a primary key is not needed, this event can be ignored. - To guarantee time ordered processing for each primary key, if the - event describes move-ins, the reader of this partition needs to wait - until the readers of the source partitions have processed all - records with timestamps <= this - PartitionEventRecord.commit_timestamp, before advancing beyond this - PartitionEventRecord. 
If the event describes move-outs, the reader - can notify the readers of the destination partitions that they can - continue processing. - - Attributes: - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Indicates the commit timestamp at which the key range change - occurred. DataChangeRecord.commit_timestamps, - PartitionStartRecord.start_timestamps, - PartitionEventRecord.commit_timestamps, and - PartitionEndRecord.end_timestamps can have the same value in - the same partition. - record_sequence (str): - Record sequence numbers are unique and monotonically - increasing (but not necessarily contiguous) for a specific - timestamp across record types in the same partition. To - guarantee ordered processing, the reader should process - records (of potentially different types) in record_sequence - order for a specific timestamp in the same partition. - partition_token (str): - Unique partition identifier describing the partition this - event occurred on. - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] - is equal to the partition token of the change stream - partition currently queried to return this - PartitionEventRecord. - move_in_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveInEvent]): - Set when one or more key ranges are moved into the change - stream partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - Example: Two key ranges are moved into partition (P1) from - partition (P2) and partition (P3) in a single transaction at - timestamp T. - - The PartitionEventRecord returned in P1 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P1" move_in_events { source_partition_token: "P2" } - move_in_events { source_partition_token: "P3" } } - - The PartitionEventRecord returned in P2 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P2" move_out_events { destination_partition_token: "P1" } } - - The PartitionEventRecord returned in P3 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P3" move_out_events { destination_partition_token: "P1" } } - move_out_events (MutableSequence[google.cloud.spanner_v1.types.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent]): - Set when one or more key ranges are moved out of the change - stream partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - Example: Two key ranges are moved out of partition (P1) to - partition (P2) and partition (P3) in a single transaction at - timestamp T. 
- - The PartitionEventRecord returned in P1 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P1" move_out_events { destination_partition_token: "P2" } - move_out_events { destination_partition_token: "P3" } } - - The PartitionEventRecord returned in P2 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P2" move_in_events { source_partition_token: "P1" } } - - The PartitionEventRecord returned in P3 will reflect the - move as: - - PartitionEventRecord { commit_timestamp: T partition_token: - "P3" move_in_events { source_partition_token: "P1" } } - """ - - class MoveInEvent(proto.Message): - r"""Describes move-in of the key ranges into the change stream partition - identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - To maintain processing the changes for a particular key in timestamp - order, the query processing the change stream partition identified - by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] - should not advance beyond the partition event record commit - timestamp until the queries processing the source change stream - partitions have processed all change stream records with timestamps - <= the partition event record commit timestamp. - - Attributes: - source_partition_token (str): - An unique partition identifier describing the - source change stream partition that recorded - changes for the key range that is moving into - this partition. - """ - - source_partition_token: str = proto.Field( - proto.STRING, - number=1, - ) - - class MoveOutEvent(proto.Message): - r"""Describes move-out of the key ranges out of the change stream - partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token]. - - To maintain processing the changes for a particular key in timestamp - order, the query processing the - [MoveOutEvent][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.MoveOutEvent] - in the partition identified by - [partition_token][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.partition_token] - should inform the queries processing the destination partitions that - they can unblock and proceed processing records past the - [commit_timestamp][google.spanner.v1.ChangeStreamRecord.PartitionEventRecord.commit_timestamp]. - - Attributes: - destination_partition_token (str): - An unique partition identifier describing the - destination change stream partition that will - record changes for the key range that is moving - out of this partition. 
- """ - - destination_partition_token: str = proto.Field( - proto.STRING, - number=1, - ) - - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - record_sequence: str = proto.Field( - proto.STRING, - number=2, - ) - partition_token: str = proto.Field( - proto.STRING, - number=3, - ) - move_in_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveInEvent'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ChangeStreamRecord.PartitionEventRecord.MoveInEvent', - ) - move_out_events: MutableSequence['ChangeStreamRecord.PartitionEventRecord.MoveOutEvent'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='ChangeStreamRecord.PartitionEventRecord.MoveOutEvent', - ) - - data_change_record: DataChangeRecord = proto.Field( - proto.MESSAGE, - number=1, - oneof='record', - message=DataChangeRecord, - ) - heartbeat_record: HeartbeatRecord = proto.Field( - proto.MESSAGE, - number=2, - oneof='record', - message=HeartbeatRecord, - ) - partition_start_record: PartitionStartRecord = proto.Field( - proto.MESSAGE, - number=3, - oneof='record', - message=PartitionStartRecord, - ) - partition_end_record: PartitionEndRecord = proto.Field( - proto.MESSAGE, - number=4, - oneof='record', - message=PartitionEndRecord, - ) - partition_event_record: PartitionEventRecord = proto.Field( - proto.MESSAGE, - number=5, - oneof='record', - message=PartitionEventRecord, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py deleted file mode 100644 index 3771ef1f71..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/commit_response.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import transaction -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'CommitResponse', - }, -) - - -class CommitResponse(proto.Message): - r"""The response for [Commit][google.spanner.v1.Spanner.Commit]. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - The Cloud Spanner timestamp at which the - transaction committed. - commit_stats (google.cloud.spanner_v1.types.CommitResponse.CommitStats): - The statistics about this ``Commit``. Not returned by - default. For more information, see - [CommitRequest.return_commit_stats][google.spanner.v1.CommitRequest.return_commit_stats]. 
- precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - If specified, transaction has not committed - yet. You must retry the commit with the new - precommit token. - - This field is a member of `oneof`_ ``MultiplexedSessionRetry``. - snapshot_timestamp (google.protobuf.timestamp_pb2.Timestamp): - If ``TransactionOptions.isolation_level`` is set to - ``IsolationLevel.REPEATABLE_READ``, then the snapshot - timestamp is the timestamp at which all reads in the - transaction ran. This timestamp is never returned. - """ - - class CommitStats(proto.Message): - r"""Additional statistics about a commit. - - Attributes: - mutation_count (int): - The total number of mutations for the transaction. Knowing - the ``mutation_count`` value can help you maximize the - number of mutations in a transaction and minimize the number - of API round trips. You can also monitor this value to - prevent transactions from exceeding the system - limit. - If the number of mutations exceeds the limit, the server - returns - ``INVALID_ARGUMENT``. - """ - - mutation_count: int = proto.Field( - proto.INT64, - number=1, - ) - - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - commit_stats: CommitStats = proto.Field( - proto.MESSAGE, - number=2, - message=CommitStats, - ) - precommit_token: transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=4, - oneof='MultiplexedSessionRetry', - message=transaction.MultiplexedSessionPrecommitToken, - ) - snapshot_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py deleted file mode 100644 index 54f745c9b7..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/keys.py +++ /dev/null @@ -1,248 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'KeyRange', - 'KeySet', - }, -) - - -class KeyRange(proto.Message): - r"""KeyRange represents a range of rows in a table or index. - - A range has a start key and an end key. These keys can be open or - closed, indicating if the range includes rows with that key. - - Keys are represented by lists, where the ith value in the list - corresponds to the ith component of the table or index primary key. - Individual values are encoded as described - [here][google.spanner.v1.TypeCode].
- - For example, consider the following table definition: - - :: - - CREATE TABLE UserEvents ( - UserName STRING(MAX), - EventDate STRING(10) - ) PRIMARY KEY(UserName, EventDate); - - The following keys name rows in this table: - - :: - - ["Bob", "2014-09-23"] - ["Alfred", "2015-06-12"] - - Since the ``UserEvents`` table's ``PRIMARY KEY`` clause names two - columns, each ``UserEvents`` key has two elements; the first is the - ``UserName``, and the second is the ``EventDate``. - - Key ranges with multiple components are interpreted - lexicographically by component using the table or index key's - declared sort order. For example, the following range returns all - events for user ``"Bob"`` that occurred in the year 2015: - - :: - - "start_closed": ["Bob", "2015-01-01"] - "end_closed": ["Bob", "2015-12-31"] - - Start and end keys can omit trailing key components. This affects - the inclusion and exclusion of rows that exactly match the provided - key components: if the key is closed, then rows that exactly match - the provided components are included; if the key is open, then rows - that exactly match are not included. - - For example, the following range includes all events for ``"Bob"`` - that occurred during and after the year 2000: - - :: - - "start_closed": ["Bob", "2000-01-01"] - "end_closed": ["Bob"] - - The next example retrieves all events for ``"Bob"``: - - :: - - "start_closed": ["Bob"] - "end_closed": ["Bob"] - - To retrieve events before the year 2000: - - :: - - "start_closed": ["Bob"] - "end_open": ["Bob", "2000-01-01"] - - The following range includes all rows in the table: - - :: - - "start_closed": [] - "end_closed": [] - - This range returns all users whose ``UserName`` begins with any - character from A to C: - - :: - - "start_closed": ["A"] - "end_open": ["D"] - - This range returns all users whose ``UserName`` begins with B: - - :: - - "start_closed": ["B"] - "end_open": ["C"] - - Key ranges honor column sort order. For example, suppose a table is - defined as follows: - - :: - - CREATE TABLE DescendingSortedTable { - Key INT64, - ... - ) PRIMARY KEY(Key DESC); - - The following range retrieves all rows with key values between 1 and - 100 inclusive: - - :: - - "start_closed": ["100"] - "end_closed": ["1"] - - Note that 100 is passed as the start, and 1 is passed as the end, - because ``Key`` is a descending column in the schema. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - start_closed (google.protobuf.struct_pb2.ListValue): - If the start is closed, then the range includes all rows - whose first ``len(start_closed)`` key columns exactly match - ``start_closed``. - - This field is a member of `oneof`_ ``start_key_type``. - start_open (google.protobuf.struct_pb2.ListValue): - If the start is open, then the range excludes rows whose - first ``len(start_open)`` key columns exactly match - ``start_open``. - - This field is a member of `oneof`_ ``start_key_type``. - end_closed (google.protobuf.struct_pb2.ListValue): - If the end is closed, then the range includes all rows whose - first ``len(end_closed)`` key columns exactly match - ``end_closed``. - - This field is a member of `oneof`_ ``end_key_type``. 
- end_open (google.protobuf.struct_pb2.ListValue): - If the end is open, then the range excludes rows whose first - ``len(end_open)`` key columns exactly match ``end_open``. - - This field is a member of `oneof`_ ``end_key_type``. - """ - - start_closed: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=1, - oneof='start_key_type', - message=struct_pb2.ListValue, - ) - start_open: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=2, - oneof='start_key_type', - message=struct_pb2.ListValue, - ) - end_closed: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=3, - oneof='end_key_type', - message=struct_pb2.ListValue, - ) - end_open: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=4, - oneof='end_key_type', - message=struct_pb2.ListValue, - ) - - -class KeySet(proto.Message): - r"""``KeySet`` defines a collection of Cloud Spanner keys and/or key - ranges. All the keys are expected to be in the same table or index. - The keys need not be sorted in any particular way. - - If the same key is specified multiple times in the set (for example - if two ranges, two keys, or a key and a range overlap), Cloud - Spanner behaves as if the key were only specified once. - - Attributes: - keys (MutableSequence[google.protobuf.struct_pb2.ListValue]): - A list of specific keys. Entries in ``keys`` should have - exactly as many elements as there are columns in the primary - or index key with which this ``KeySet`` is used. Individual - key values are encoded as described - [here][google.spanner.v1.TypeCode]. - ranges (MutableSequence[google.cloud.spanner_v1.types.KeyRange]): - A list of key ranges. See - [KeyRange][google.spanner.v1.KeyRange] for more information - about key range specifications. - all_ (bool): - For convenience ``all`` can be set to ``true`` to indicate - that this ``KeySet`` matches all keys in the table or index. - Note that any keys specified in ``keys`` or ``ranges`` are - only yielded once. - """ - - keys: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct_pb2.ListValue, - ) - ranges: MutableSequence['KeyRange'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='KeyRange', - ) - all_: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py deleted file mode 100644 index 0d4f7b4466..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/mutation.py +++ /dev/null @@ -1,201 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
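As a concrete counterpart to the ``["Bob", ...]`` range examples in the ``KeyRange`` docstring above, the following sketch builds the inclusive 2015 range for user ``"Bob"`` with the proto-plus types; the values follow the docstring's hypothetical ``UserEvents`` schema:

from google.protobuf import struct_pb2
from google.cloud.spanner_v1.types import KeyRange, KeySet

def key(*parts):
    # Keys are ListValues; STRING key components map to string_value.
    return struct_pb2.ListValue(
        values=[struct_pb2.Value(string_value=p) for p in parts])

# All events for user "Bob" in 2015, both endpoints inclusive.
bob_2015 = KeyRange(
    start_closed=key("Bob", "2015-01-01"),
    end_closed=key("Bob", "2015-12-31"),
)
keyset = KeySet(ranges=[bob_2015])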
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import keys -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'Mutation', - }, -) - - -class Mutation(proto.Message): - r"""A modification to one or more Cloud Spanner rows. Mutations can be - applied to a Cloud Spanner database by sending them in a - [Commit][google.spanner.v1.Spanner.Commit] call. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - insert (google.cloud.spanner_v1.types.Mutation.Write): - Insert new rows in a table. If any of the rows already - exist, the write or transaction fails with error - ``ALREADY_EXISTS``. - - This field is a member of `oneof`_ ``operation``. - update (google.cloud.spanner_v1.types.Mutation.Write): - Update existing rows in a table. If any of the rows does not - already exist, the transaction fails with error - ``NOT_FOUND``. - - This field is a member of `oneof`_ ``operation``. - insert_or_update (google.cloud.spanner_v1.types.Mutation.Write): - Like [insert][google.spanner.v1.Mutation.insert], except - that if the row already exists, then its column values are - overwritten with the ones provided. Any column values not - explicitly written are preserved. - - When using - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], - just as when using - [insert][google.spanner.v1.Mutation.insert], all - ``NOT NULL`` columns in the table must be given a value. - This holds true even when the row already exists and will - therefore actually be updated. - - This field is a member of `oneof`_ ``operation``. - replace (google.cloud.spanner_v1.types.Mutation.Write): - Like [insert][google.spanner.v1.Mutation.insert], except - that if the row already exists, it is deleted, and the - column values provided are inserted instead. Unlike - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], - this means any values not explicitly written become - ``NULL``. - - In an interleaved table, if you create the child table with - the ``ON DELETE CASCADE`` annotation, then replacing a - parent row also deletes the child rows. Otherwise, you must - delete the child rows before you replace the parent row. - - This field is a member of `oneof`_ ``operation``. - delete (google.cloud.spanner_v1.types.Mutation.Delete): - Delete rows from a table. Succeeds whether or - not the named rows were present. - - This field is a member of `oneof`_ ``operation``. - """ - - class Write(proto.Message): - r"""Arguments to [insert][google.spanner.v1.Mutation.insert], - [update][google.spanner.v1.Mutation.update], - [insert_or_update][google.spanner.v1.Mutation.insert_or_update], and - [replace][google.spanner.v1.Mutation.replace] operations. - - Attributes: - table (str): - Required. The table whose rows will be - written. - columns (MutableSequence[str]): - The names of the columns in - [table][google.spanner.v1.Mutation.Write.table] to be - written. - - The list of columns must contain enough columns to allow - Cloud Spanner to derive values for all primary key columns - in the row(s) to be modified. 
- values (MutableSequence[google.protobuf.struct_pb2.ListValue]): - The values to be written. ``values`` can contain more than - one list of values. If it does, then multiple rows are - written, one for each entry in ``values``. Each list in - ``values`` must have exactly as many entries as there are - entries in - [columns][google.spanner.v1.Mutation.Write.columns] above. - Sending multiple lists is equivalent to sending multiple - ``Mutation``\ s, each containing one ``values`` entry and - repeating [table][google.spanner.v1.Mutation.Write.table] - and [columns][google.spanner.v1.Mutation.Write.columns]. - Individual values in each list are encoded as described - [here][google.spanner.v1.TypeCode]. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - ) - columns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - values: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=struct_pb2.ListValue, - ) - - class Delete(proto.Message): - r"""Arguments to [delete][google.spanner.v1.Mutation.delete] operations. - - Attributes: - table (str): - Required. The table whose rows will be - deleted. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. The primary keys of the rows within - [table][google.spanner.v1.Mutation.Delete.table] to delete. - The primary keys must be specified in the order in which - they appear in the ``PRIMARY KEY()`` clause of the table's - equivalent DDL statement (the DDL statement used to create - the table). Delete is idempotent. The transaction will - succeed even if some or all rows do not exist. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - ) - key_set: keys.KeySet = proto.Field( - proto.MESSAGE, - number=2, - message=keys.KeySet, - ) - - insert: Write = proto.Field( - proto.MESSAGE, - number=1, - oneof='operation', - message=Write, - ) - update: Write = proto.Field( - proto.MESSAGE, - number=2, - oneof='operation', - message=Write, - ) - insert_or_update: Write = proto.Field( - proto.MESSAGE, - number=3, - oneof='operation', - message=Write, - ) - replace: Write = proto.Field( - proto.MESSAGE, - number=4, - oneof='operation', - message=Write, - ) - delete: Delete = proto.Field( - proto.MESSAGE, - number=5, - oneof='operation', - message=Delete, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py deleted file mode 100644 index 26e730d2d0..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/query_plan.py +++ /dev/null @@ -1,219 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
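Tying the ``Write`` arguments above together, here is a hedged sketch of a single insert mutation; the ``Singers`` table and its columns are illustrative, not part of this API:

from google.protobuf import struct_pb2
from google.cloud.spanner_v1.types import Mutation

row = struct_pb2.ListValue(values=[
    struct_pb2.Value(string_value="1"),     # INT64 values are encoded as strings
    struct_pb2.Value(string_value="Marc"),
])
mutation = Mutation(
    insert=Mutation.Write(
        table="Singers",                    # hypothetical table
        columns=["SingerId", "FirstName"],
        values=[row],                       # one ListValue per row
    )
)
print(Mutation.to_json(mutation))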
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'PlanNode', - 'QueryPlan', - }, -) - - -class PlanNode(proto.Message): - r"""Node information for nodes appearing in a - [QueryPlan.plan_nodes][google.spanner.v1.QueryPlan.plan_nodes]. - - Attributes: - index (int): - The ``PlanNode``'s index in [node - list][google.spanner.v1.QueryPlan.plan_nodes]. - kind (google.cloud.spanner_v1.types.PlanNode.Kind): - Used to determine the type of node. May be needed for - visualizing different kinds of nodes differently. For - example, If the node is a - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] node, it - will have a condensed representation which can be used to - directly embed a description of the node in its parent. - display_name (str): - The display name for the node. - child_links (MutableSequence[google.cloud.spanner_v1.types.PlanNode.ChildLink]): - List of child node ``index``\ es and their relationship to - this parent. - short_representation (google.cloud.spanner_v1.types.PlanNode.ShortRepresentation): - Condensed representation for - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] nodes. - metadata (google.protobuf.struct_pb2.Struct): - Attributes relevant to the node contained in a group of - key-value pairs. For example, a Parameter Reference node - could have the following information in its metadata: - - :: - - { - "parameter_reference": "param1", - "parameter_type": "array" - } - execution_stats (google.protobuf.struct_pb2.Struct): - The execution statistics associated with the - node, contained in a group of key-value pairs. - Only present if the plan was returned as a - result of a profile query. For example, number - of executions, number of rows/time per execution - etc. - """ - class Kind(proto.Enum): - r"""The kind of [PlanNode][google.spanner.v1.PlanNode]. Distinguishes - between the two different kinds of nodes that can appear in a query - plan. - - Values: - KIND_UNSPECIFIED (0): - Not specified. - RELATIONAL (1): - Denotes a Relational operator node in the expression tree. - Relational operators represent iterative processing of rows - during query execution. For example, a ``TableScan`` - operation that reads rows from a table. - SCALAR (2): - Denotes a Scalar node in the expression tree. - Scalar nodes represent non-iterable entities in - the query plan. For example, constants or - arithmetic operators appearing inside predicate - expressions or references to column names. - """ - KIND_UNSPECIFIED = 0 - RELATIONAL = 1 - SCALAR = 2 - - class ChildLink(proto.Message): - r"""Metadata associated with a parent-child relationship appearing in a - [PlanNode][google.spanner.v1.PlanNode]. - - Attributes: - child_index (int): - The node to which the link points. - type_ (str): - The type of the link. For example, in Hash - Joins this could be used to distinguish between - the build child and the probe child, or in the - case of the child being an output variable, to - represent the tag associated with the output - variable. - variable (str): - Only present if the child node is - [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] and - corresponds to an output variable of the parent node. The - field carries the name of the output variable. 
For example, - a ``TableScan`` operator that reads rows from a table will - have child links to the ``SCALAR`` nodes representing the - output variables created for each column that is read by the - operator. The corresponding ``variable`` fields will be set - to the variable names assigned to the columns. - """ - - child_index: int = proto.Field( - proto.INT32, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - variable: str = proto.Field( - proto.STRING, - number=3, - ) - - class ShortRepresentation(proto.Message): - r"""Condensed representation of a node and its subtree. Only present for - ``SCALAR`` [PlanNode(s)][google.spanner.v1.PlanNode]. - - Attributes: - description (str): - A string representation of the expression - subtree rooted at this node. - subqueries (MutableMapping[str, int]): - A mapping of (subquery variable name) -> (subquery node id) - for cases where the ``description`` string of this node - references a ``SCALAR`` subquery contained in the expression - subtree rooted at this node. The referenced ``SCALAR`` - subquery may not necessarily be a direct child of this node. - """ - - description: str = proto.Field( - proto.STRING, - number=1, - ) - subqueries: MutableMapping[str, int] = proto.MapField( - proto.STRING, - proto.INT32, - number=2, - ) - - index: int = proto.Field( - proto.INT32, - number=1, - ) - kind: Kind = proto.Field( - proto.ENUM, - number=2, - enum=Kind, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - child_links: MutableSequence[ChildLink] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=ChildLink, - ) - short_representation: ShortRepresentation = proto.Field( - proto.MESSAGE, - number=5, - message=ShortRepresentation, - ) - metadata: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=6, - message=struct_pb2.Struct, - ) - execution_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=7, - message=struct_pb2.Struct, - ) - - -class QueryPlan(proto.Message): - r"""Contains an ordered list of nodes appearing in the query - plan. - - Attributes: - plan_nodes (MutableSequence[google.cloud.spanner_v1.types.PlanNode]): - The nodes in the query plan. Plan nodes are returned in - pre-order starting with the plan root. Each - [PlanNode][google.spanner.v1.PlanNode]'s ``id`` corresponds - to its index in ``plan_nodes``. - """ - - plan_nodes: MutableSequence['PlanNode'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PlanNode', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py deleted file mode 100644 index 9503d0c172..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/result_set.py +++ /dev/null @@ -1,379 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
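# [Editor's sketch, not part of the generated file] Walking the QueryPlan
# defined above: plan_nodes is returned in pre-order and each node's id
# equals its index in plan_nodes, so ChildLink.child_index can be followed
# directly to print the plan as an indented tree.
from google.cloud.spanner_v1.types import QueryPlan

def print_plan(plan: QueryPlan, index: int = 0, depth: int = 0) -> None:
    node = plan.plan_nodes[index]
    label = node.display_name
    if node.short_representation.description:
        # Only SCALAR nodes carry a condensed representation.
        label += f" ({node.short_representation.description})"
    print("  " * depth + label)
    for link in node.child_links:
        print_plan(plan, link.child_index, depth + 1)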
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import query_plan as gs_query_plan -from google.cloud.spanner_v1.types import transaction as gs_transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'ResultSet', - 'PartialResultSet', - 'ResultSetMetadata', - 'ResultSetStats', - }, -) - - -class ResultSet(proto.Message): - r"""Results from [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. - - Attributes: - metadata (google.cloud.spanner_v1.types.ResultSetMetadata): - Metadata about the result set, such as row - type information. - rows (MutableSequence[google.protobuf.struct_pb2.ListValue]): - Each element in ``rows`` is a row whose format is defined by - [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. - The ith element in each row matches the ith field in - [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. - Elements are encoded based on type as described - [here][google.spanner.v1.TypeCode]. - stats (google.cloud.spanner_v1.types.ResultSetStats): - Query plan and execution statistics for the SQL statement - that produced this result set. These can be requested by - setting - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - DML statements always produce stats containing the number of - rows modified, unless executed with - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] - set to - [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN]. - Other fields might or might not be populated, based on the - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. A precommit token is included if the read-write - transaction is on a multiplexed session. Pass the precommit - token with the highest sequence number from this transaction - attempt to the [Commit][google.spanner.v1.Spanner.Commit] - request for this transaction. - """ - - metadata: 'ResultSetMetadata' = proto.Field( - proto.MESSAGE, - number=1, - message='ResultSetMetadata', - ) - rows: MutableSequence[struct_pb2.ListValue] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct_pb2.ListValue, - ) - stats: 'ResultSetStats' = proto.Field( - proto.MESSAGE, - number=3, - message='ResultSetStats', - ) - precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=5, - message=gs_transaction.MultiplexedSessionPrecommitToken, - ) - - -class PartialResultSet(proto.Message): - r"""Partial results from a streaming read or SQL query. Streaming - reads and SQL queries better tolerate large result sets, large - rows, and large values, but are a little trickier to consume. - - Attributes: - metadata (google.cloud.spanner_v1.types.ResultSetMetadata): - Metadata about the result set, such as row - type information. Only present in the first - response. - values (MutableSequence[google.protobuf.struct_pb2.Value]): - A streamed result set consists of a stream of values, which - might be split into many ``PartialResultSet`` messages to - accommodate large rows and/or large values.
Every N complete - values defines a row, where N is equal to the number of - entries in - [metadata.row_type.fields][google.spanner.v1.StructType.fields]. - - Most values are encoded based on type as described - [here][google.spanner.v1.TypeCode]. - - It's possible that the last value in values is "chunked", - meaning that the rest of the value is sent in subsequent - ``PartialResultSet``\ (s). This is denoted by the - [chunked_value][google.spanner.v1.PartialResultSet.chunked_value] - field. Two or more chunked values can be merged to form a - complete value as follows: - - - ``bool/number/null``: can't be chunked - - ``string``: concatenate the strings - - ``list``: concatenate the lists. If the last element in a - list is a ``string``, ``list``, or ``object``, merge it - with the first element in the next list by applying these - rules recursively. - - ``object``: concatenate the (field name, field value) - pairs. If a field name is duplicated, then apply these - rules recursively to merge the field values. - - Some examples of merging: - - :: - - Strings are concatenated. - "foo", "bar" => "foobar" - - Lists of non-strings are concatenated. - [2, 3], [4] => [2, 3, 4] - - Lists are concatenated, but the last and first elements are merged - because they are strings. - ["a", "b"], ["c", "d"] => ["a", "bc", "d"] - - Lists are concatenated, but the last and first elements are merged - because they are lists. Recursively, the last and first elements - of the inner lists are merged because they are strings. - ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"] - - Non-overlapping object fields are combined. - {"a": "1"}, {"b": "2"} => {"a": "1", "b": "2"} - - Overlapping object fields are merged. - {"a": "1"}, {"a": "2"} => {"a": "12"} - - Examples of merging objects containing lists of strings. - {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]} - - For a more complete example, suppose a streaming SQL query - is yielding a result set whose rows contain a single string - field. The following ``PartialResultSet``\ s might be - yielded: - - :: - - { - "metadata": { ... } - "values": ["Hello", "W"] - "chunked_value": true - "resume_token": "Af65..." - } - { - "values": ["orl"] - "chunked_value": true - } - { - "values": ["d"] - "resume_token": "Zx1B..." - } - - This sequence of ``PartialResultSet``\ s encodes two rows, - one containing the field value ``"Hello"``, and a second - containing the field value ``"World" = "W" + "orl" + "d"``. - - Not all ``PartialResultSet``\ s contain a ``resume_token``. - Execution can only be resumed from a previously yielded - ``resume_token``. For the above sequence of - ``PartialResultSet``\ s, resuming the query with - ``"resume_token": "Af65..."`` yields results from the - ``PartialResultSet`` with value "orl". - chunked_value (bool): - If true, then the final value in - [values][google.spanner.v1.PartialResultSet.values] is - chunked, and must be combined with more values from - subsequent ``PartialResultSet``\ s to obtain a complete - field value. - resume_token (bytes): - Streaming calls might be interrupted for a variety of - reasons, such as TCP connection loss. If this occurs, the - stream of results can be resumed by re-sending the original - request and including ``resume_token``. Note that executing - any other transaction in the same session invalidates the - token. - stats (google.cloud.spanner_v1.types.ResultSetStats): - Query plan and execution statistics for the statement that - produced this streaming result set.
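# [Editor's sketch, not part of the generated file] Assembling rows from a
# stream of PartialResultSet messages for the simplest case, in which any
# chunked value is a string (strings merge by concatenation, per the rules
# above). Real clients also recurse into chunked lists and objects.
from google.protobuf import struct_pb2

def iter_rows(partials, n_columns: int):
    buffer, pending = [], None
    for prs in partials:
        values = list(prs.values)
        if pending is not None and values:
            # Merge the chunk left over from the previous message.
            values[0] = struct_pb2.Value(
                string_value=pending.string_value + values[0].string_value
            )
            pending = None
        if prs.chunked_value and values:
            pending = values.pop()  # incomplete; wait for the next message
        buffer.extend(values)
        while len(buffer) >= n_columns:  # every N complete values is a row
            yield buffer[:n_columns]
            buffer = buffer[n_columns:]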
These can be requested - by setting - [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] - and are sent only once with the last response in the stream. - This field is also present in the last response for DML - statements. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. A precommit token is included if the read-write - transaction has multiplexed sessions enabled. Pass the - precommit token with the highest sequence number from this - transaction attempt to the - [Commit][google.spanner.v1.Spanner.Commit] request for this - transaction. - last (bool): - Optional. Indicates whether this is the last - ``PartialResultSet`` in the stream. The server might - optionally set this field. Clients shouldn't rely on this - field being set in all cases. - """ - - metadata: 'ResultSetMetadata' = proto.Field( - proto.MESSAGE, - number=1, - message='ResultSetMetadata', - ) - values: MutableSequence[struct_pb2.Value] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=struct_pb2.Value, - ) - chunked_value: bool = proto.Field( - proto.BOOL, - number=3, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=4, - ) - stats: 'ResultSetStats' = proto.Field( - proto.MESSAGE, - number=5, - message='ResultSetStats', - ) - precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=8, - message=gs_transaction.MultiplexedSessionPrecommitToken, - ) - last: bool = proto.Field( - proto.BOOL, - number=9, - ) - - -class ResultSetMetadata(proto.Message): - r"""Metadata about a [ResultSet][google.spanner.v1.ResultSet] or - [PartialResultSet][google.spanner.v1.PartialResultSet]. - - Attributes: - row_type (google.cloud.spanner_v1.types.StructType): - Indicates the field names and types for the rows in the - result set. For example, a SQL query like - ``"SELECT UserId, UserName FROM Users"`` could return a - ``row_type`` value like: - - :: - - "fields": [ - { "name": "UserId", "type": { "code": "INT64" } }, - { "name": "UserName", "type": { "code": "STRING" } }, - ] - transaction (google.cloud.spanner_v1.types.Transaction): - If the read or SQL query began a transaction - as a side-effect, the information about the new - transaction is yielded here. - undeclared_parameters (google.cloud.spanner_v1.types.StructType): - A SQL query can be parameterized. In PLAN mode, these - parameters can be undeclared. This indicates the field names - and types for those undeclared parameters in the SQL query. - For example, a SQL query like - ``"SELECT * FROM Users where UserId = @userId and UserName = @userName "`` - could return an ``undeclared_parameters`` value like: - - :: - - "fields": [ - { "name": "UserId", "type": { "code": "INT64" } }, - { "name": "UserName", "type": { "code": "STRING" } }, - ] - """ - - row_type: gs_type.StructType = proto.Field( - proto.MESSAGE, - number=1, - message=gs_type.StructType, - ) - transaction: gs_transaction.Transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.Transaction, - ) - undeclared_parameters: gs_type.StructType = proto.Field( - proto.MESSAGE, - number=3, - message=gs_type.StructType, - ) - - -class ResultSetStats(proto.Message): - r"""Additional statistics about a - [ResultSet][google.spanner.v1.ResultSet] or - [PartialResultSet][google.spanner.v1.PartialResultSet]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_plan (google.cloud.spanner_v1.types.QueryPlan): - [QueryPlan][google.spanner.v1.QueryPlan] for the query - associated with this result. - query_stats (google.protobuf.struct_pb2.Struct): - Aggregated statistics from the execution of the query. Only - present when the query is profiled. For example, a query - could return the statistics as follows: - - :: - - { - "rows_returned": "3", - "elapsed_time": "1.22 secs", - "cpu_time": "1.19 secs" - } - row_count_exact (int): - Standard DML returns an exact count of rows - that were modified. - - This field is a member of `oneof`_ ``row_count``. - row_count_lower_bound (int): - Partitioned DML doesn't offer exactly-once - semantics, so it returns a lower bound of the - rows modified. - - This field is a member of `oneof`_ ``row_count``. - """ - - query_plan: gs_query_plan.QueryPlan = proto.Field( - proto.MESSAGE, - number=1, - message=gs_query_plan.QueryPlan, - ) - query_stats: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - row_count_exact: int = proto.Field( - proto.INT64, - number=3, - oneof='row_count', - ) - row_count_lower_bound: int = proto.Field( - proto.INT64, - number=4, - oneof='row_count', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py deleted file mode 100644 index 2742de8269..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/spanner.py +++ /dev/null @@ -1,1782 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
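# [Editor's sketch, not part of the generated file] Reading the `row_count`
# oneof on the ResultSetStats message above; `._pb` exposes the underlying
# protobuf message so WhichOneof can report which member is populated.
def modified_row_count(stats) -> int:
    which = stats._pb.WhichOneof("row_count")
    if which == "row_count_exact":
        return stats.row_count_exact  # standard DML: exact count
    if which == "row_count_lower_bound":
        return stats.row_count_lower_bound  # partitioned DML: lower bound
    return 0  # plain queries report no row count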
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_v1.types import keys -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import transaction as gs_transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'CreateSessionRequest', - 'BatchCreateSessionsRequest', - 'BatchCreateSessionsResponse', - 'Session', - 'GetSessionRequest', - 'ListSessionsRequest', - 'ListSessionsResponse', - 'DeleteSessionRequest', - 'RequestOptions', - 'DirectedReadOptions', - 'ExecuteSqlRequest', - 'ExecuteBatchDmlRequest', - 'ExecuteBatchDmlResponse', - 'PartitionOptions', - 'PartitionQueryRequest', - 'PartitionReadRequest', - 'Partition', - 'PartitionResponse', - 'ReadRequest', - 'BeginTransactionRequest', - 'CommitRequest', - 'RollbackRequest', - 'BatchWriteRequest', - 'BatchWriteResponse', - }, -) - - -class CreateSessionRequest(proto.Message): - r"""The request for - [CreateSession][google.spanner.v1.Spanner.CreateSession]. - - Attributes: - database (str): - Required. The database in which the new - session is created. - session (google.cloud.spanner_v1.types.Session): - Required. The session to create. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - session: 'Session' = proto.Field( - proto.MESSAGE, - number=2, - message='Session', - ) - - -class BatchCreateSessionsRequest(proto.Message): - r"""The request for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - Attributes: - database (str): - Required. The database in which the new - sessions are created. - session_template (google.cloud.spanner_v1.types.Session): - Parameters to apply to each created session. - session_count (int): - Required. The number of sessions to be created in this batch - call. The API can return fewer than the requested number of - sessions. If a specific number of sessions are desired, the - client can make additional calls to ``BatchCreateSessions`` - (adjusting - [session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count] - as necessary). - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - session_template: 'Session' = proto.Field( - proto.MESSAGE, - number=2, - message='Session', - ) - session_count: int = proto.Field( - proto.INT32, - number=3, - ) - - -class BatchCreateSessionsResponse(proto.Message): - r"""The response for - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]. - - Attributes: - session (MutableSequence[google.cloud.spanner_v1.types.Session]): - The freshly created sessions. - """ - - session: MutableSequence['Session'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Session', - ) - - -class Session(proto.Message): - r"""A session in the Cloud Spanner API. - - Attributes: - name (str): - Output only. The name of the session. This is - always system-assigned. - labels (MutableMapping[str, str]): - The labels for the session. - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. 
- - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 64 labels can be associated with a given - session. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp when the session - is created. - approximate_last_use_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The approximate timestamp when - the session is last used. It's typically earlier - than the actual last use time. - creator_role (str): - The database role which created this session. - multiplexed (bool): - Optional. If ``true``, specifies a multiplexed session. Use - a multiplexed session for multiple, concurrent read-only - operations. Don't use them for read-write transactions, - partitioned reads, or partitioned queries. Use - [``sessions.create``][google.spanner.v1.Spanner.CreateSession] - to create multiplexed sessions. Don't use - [BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions] - to create a multiplexed session. You can't delete or list - multiplexed sessions. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - approximate_last_use_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - creator_role: str = proto.Field( - proto.STRING, - number=5, - ) - multiplexed: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class GetSessionRequest(proto.Message): - r"""The request for [GetSession][google.spanner.v1.Spanner.GetSession]. - - Attributes: - name (str): - Required. The name of the session to - retrieve. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSessionsRequest(proto.Message): - r"""The request for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Attributes: - database (str): - Required. The database in which to list - sessions. - page_size (int): - Number of sessions to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.v1.ListSessionsResponse.next_page_token] - from a previous - [ListSessionsResponse][google.spanner.v1.ListSessionsResponse]. - filter (str): - An expression for filtering the results of the request. - Filter rules are case insensitive. The fields eligible for - filtering are: - - - ``labels.key`` where key is the name of a label - - Some examples of using filters are: - - - ``labels.env:*`` --> The session has the label "env". - - ``labels.env:dev`` --> The session has the label "env" and - the value of the label contains the string "dev". - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSessionsResponse(proto.Message): - r"""The response for - [ListSessions][google.spanner.v1.Spanner.ListSessions]. - - Attributes: - sessions (MutableSequence[google.cloud.spanner_v1.types.Session]): - The list of requested sessions. 
- next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListSessions][google.spanner.v1.Spanner.ListSessions] call - to fetch more of the matching sessions. - """ - - @property - def raw_page(self): - return self - - sessions: MutableSequence['Session'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Session', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteSessionRequest(proto.Message): - r"""The request for - [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. - - Attributes: - name (str): - Required. The name of the session to delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RequestOptions(proto.Message): - r"""Common request options for various APIs. - - Attributes: - priority (google.cloud.spanner_v1.types.RequestOptions.Priority): - Priority for the request. - request_tag (str): - A per-request tag which can be applied to queries or reads, - used for statistics collection. Both ``request_tag`` and - ``transaction_tag`` can be specified for a read or query - that belongs to a transaction. This field is ignored for - requests where it's not applicable (for example, - ``CommitRequest``). Legal characters for ``request_tag`` - values are all printable characters (ASCII 32 - 126) and the - length of a request_tag is limited to 50 characters. Values - that exceed this limit are truncated. Any leading underscore - (\_) characters are removed from the string. - transaction_tag (str): - A tag used for statistics collection about this transaction. - Both ``request_tag`` and ``transaction_tag`` can be - specified for a read or query that belongs to a transaction. - The value of transaction_tag should be the same for all - requests belonging to the same transaction. If this request - doesn't belong to any transaction, ``transaction_tag`` is - ignored. Legal characters for ``transaction_tag`` values are - all printable characters (ASCII 32 - 126) and the length of - a ``transaction_tag`` is limited to 50 characters. Values - that exceed this limit are truncated. Any leading underscore - (\_) characters are removed from the string. - """ - class Priority(proto.Enum): - r"""The relative priority for requests. Note that priority isn't - applicable for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - - The priority acts as a hint to the Cloud Spanner scheduler and - doesn't guarantee priority or order of execution. For example: - - - Some parts of a write operation always execute at - ``PRIORITY_HIGH``, regardless of the specified priority. This can - cause you to see an increase in high priority workload even when - executing a low priority request. This can also potentially cause - a priority inversion where a lower priority request is fulfilled - ahead of a higher priority request. - - If a transaction contains multiple operations with different - priorities, Cloud Spanner doesn't guarantee to process the higher - priority operations first. There might be other constraints to - satisfy, such as the order of operations. - - Values: - PRIORITY_UNSPECIFIED (0): - ``PRIORITY_UNSPECIFIED`` is equivalent to ``PRIORITY_HIGH``. - PRIORITY_LOW (1): - This specifies that the request is low - priority. - PRIORITY_MEDIUM (2): - This specifies that the request is medium - priority. - PRIORITY_HIGH (3): - This specifies that the request is high - priority. 
- """ - PRIORITY_UNSPECIFIED = 0 - PRIORITY_LOW = 1 - PRIORITY_MEDIUM = 2 - PRIORITY_HIGH = 3 - - priority: Priority = proto.Field( - proto.ENUM, - number=1, - enum=Priority, - ) - request_tag: str = proto.Field( - proto.STRING, - number=2, - ) - transaction_tag: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DirectedReadOptions(proto.Message): - r"""The ``DirectedReadOptions`` can be used to indicate which replicas - or regions should be used for non-transactional reads or queries. - - ``DirectedReadOptions`` can only be specified for a read-only - transaction, otherwise the API returns an ``INVALID_ARGUMENT`` - error. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - include_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.IncludeReplicas): - ``Include_replicas`` indicates the order of replicas (as - they appear in this list) to process the request. If - ``auto_failover_disabled`` is set to ``true`` and all - replicas are exhausted without finding a healthy replica, - Spanner waits for a replica in the list to become available, - requests might fail due to ``DEADLINE_EXCEEDED`` errors. - - This field is a member of `oneof`_ ``replicas``. - exclude_replicas (google.cloud.spanner_v1.types.DirectedReadOptions.ExcludeReplicas): - ``Exclude_replicas`` indicates that specified replicas - should be excluded from serving requests. Spanner doesn't - route requests to the replicas in this list. - - This field is a member of `oneof`_ ``replicas``. - """ - - class ReplicaSelection(proto.Message): - r"""The directed read replica selector. Callers must provide one or more - of the following fields for replica selection: - - - ``location`` - The location must be one of the regions within the - multi-region configuration of your database. - - ``type`` - The type of the replica. - - Some examples of using replica_selectors are: - - - ``location:us-east1`` --> The "us-east1" replica(s) of any - available type is used to process the request. - - ``type:READ_ONLY`` --> The "READ_ONLY" type replica(s) in the - nearest available location are used to process the request. - - ``location:us-east1 type:READ_ONLY`` --> The "READ_ONLY" type - replica(s) in location "us-east1" is used to process the request. - - Attributes: - location (str): - The location or region of the serving - requests, for example, "us-east1". - type_ (google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection.Type): - The type of replica. - """ - class Type(proto.Enum): - r"""Indicates the type of replica. - - Values: - TYPE_UNSPECIFIED (0): - Not specified. - READ_WRITE (1): - Read-write replicas support both reads and - writes. - READ_ONLY (2): - Read-only replicas only support reads (not - writes). - """ - TYPE_UNSPECIFIED = 0 - READ_WRITE = 1 - READ_ONLY = 2 - - location: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'DirectedReadOptions.ReplicaSelection.Type' = proto.Field( - proto.ENUM, - number=2, - enum='DirectedReadOptions.ReplicaSelection.Type', - ) - - class IncludeReplicas(proto.Message): - r"""An ``IncludeReplicas`` contains a repeated set of - ``ReplicaSelection`` which indicates the order in which replicas - should be considered. 
- - Attributes: - replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]): - The directed read replica selector. - auto_failover_disabled (bool): - If ``true``, Spanner doesn't route requests to a replica - outside the ``include_replicas`` list when all of the - specified replicas are unavailable or unhealthy. Default - value is ``false``. - """ - - replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DirectedReadOptions.ReplicaSelection', - ) - auto_failover_disabled: bool = proto.Field( - proto.BOOL, - number=2, - ) - - class ExcludeReplicas(proto.Message): - r"""An ExcludeReplicas contains a repeated set of - ReplicaSelection that should be excluded from serving requests. - - Attributes: - replica_selections (MutableSequence[google.cloud.spanner_v1.types.DirectedReadOptions.ReplicaSelection]): - The directed read replica selector. - """ - - replica_selections: MutableSequence['DirectedReadOptions.ReplicaSelection'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DirectedReadOptions.ReplicaSelection', - ) - - include_replicas: IncludeReplicas = proto.Field( - proto.MESSAGE, - number=1, - oneof='replicas', - message=IncludeReplicas, - ) - exclude_replicas: ExcludeReplicas = proto.Field( - proto.MESSAGE, - number=2, - oneof='replicas', - message=ExcludeReplicas, - ) - - -class ExecuteSqlRequest(proto.Message): - r"""The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] - and - [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. - - Attributes: - session (str): - Required. The session in which the SQL query - should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - The transaction to use. - - For queries, if none is provided, the default is - a temporary read-only transaction with strong - concurrency. - - Standard DML statements require a read-write - transaction. To protect against replays, - single-use transactions are not supported. The - caller must either supply an existing - transaction ID or begin a new transaction. - - Partitioned DML requires an existing Partitioned - DML transaction ID. - sql (str): - Required. The SQL string. - params (google.protobuf.struct_pb2.Struct): - Parameter names and values that bind to placeholders in the - SQL string. - - A parameter placeholder consists of the ``@`` character - followed by the parameter name (for example, - ``@firstName``). Parameter names must conform to the naming - requirements of identifiers as specified at - https://cloud.google.com/spanner/docs/lexical#identifiers. - - Parameters can appear anywhere that a literal value is - expected. The same parameter name can be used more than - once, for example: - - ``"WHERE id > @msg_id AND id < @msg_id + 100"`` - - It's an error to execute a SQL statement with unbound - parameters. - param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]): - It isn't always possible for Cloud Spanner to infer the - right SQL type from a JSON value. For example, values of - type ``BYTES`` and values of type ``STRING`` both appear in - [params][google.spanner.v1.ExecuteSqlRequest.params] as JSON - strings. - - In these cases, you can use ``param_types`` to specify the - exact SQL type for some or all of the SQL statement - parameters. See the definition of - [Type][google.spanner.v1.Type] for more information about - SQL types.
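# [Editor's sketch, not part of the generated file] A DirectedReadOptions
# message that prefers READ_ONLY replicas in "us-east1", per the
# ReplicaSelection rules above. Valid only for read-only transactions.
from google.cloud.spanner_v1.types import DirectedReadOptions

directed_read = DirectedReadOptions(
    include_replicas=DirectedReadOptions.IncludeReplicas(
        replica_selections=[
            DirectedReadOptions.ReplicaSelection(
                location="us-east1",
                type_=DirectedReadOptions.ReplicaSelection.Type.READ_ONLY,
            )
        ],
        auto_failover_disabled=False,  # allow routing elsewhere if unhealthy
    )
)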
- resume_token (bytes): - If this request is resuming a previously interrupted SQL - statement execution, ``resume_token`` should be copied from - the last - [PartialResultSet][google.spanner.v1.PartialResultSet] - yielded before the interruption. Doing this enables the new - SQL statement execution to resume where the last one left - off. The rest of the request parameters must exactly match - the request that yielded this token. - query_mode (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryMode): - Used to control the amount of debugging information returned - in [ResultSetStats][google.spanner.v1.ResultSetStats]. If - [partition_token][google.spanner.v1.ExecuteSqlRequest.partition_token] - is set, - [query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] - can only be set to - [QueryMode.NORMAL][google.spanner.v1.ExecuteSqlRequest.QueryMode.NORMAL]. - partition_token (bytes): - If present, results are restricted to the specified - partition previously created using ``PartitionQuery``. There - must be an exact match for the values of fields common to - this message and the ``PartitionQueryRequest`` message used - to create this ``partition_token``. - seqno (int): - A per-transaction sequence number used to - identify this request. This field makes each - request idempotent such that if the request is - received multiple times, at most one succeeds. - - The sequence number must be monotonically - increasing within the transaction. If a request - arrives for the first time with an out-of-order - sequence number, the transaction can be aborted. - Replays of previously handled requests yield the - same response as the first execution. - - Required for DML statements. Ignored for - queries. - query_options (google.cloud.spanner_v1.types.ExecuteSqlRequest.QueryOptions): - Query optimizer configuration to use for the - given query. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions): - Directed read options for this request. - data_boost_enabled (bool): - If this is for a partitioned query and this field is set to - ``true``, the request is executed with Spanner Data Boost - independent compute resources. - - If the field is set to ``true`` but the request doesn't set - ``partition_token``, the API returns an ``INVALID_ARGUMENT`` - error. - last_statement (bool): - Optional. If set to ``true``, this statement marks the end - of the transaction. After this statement executes, you must - commit or abort the transaction. Attempts to execute any - other requests against this transaction (including reads and - queries) are rejected. - - For DML statements, setting this option might cause some - error reporting to be deferred until commit time (for - example, validation of unique constraints). Given this, - successful execution of a DML statement shouldn't be assumed - until a subsequent ``Commit`` call completes successfully. - """ - class QueryMode(proto.Enum): - r"""Mode in which the statement must be processed. - - Values: - NORMAL (0): - The default mode. Only the statement results - are returned. - PLAN (1): - This mode returns only the query plan, - without any results or execution statistics - information. - PROFILE (2): - This mode returns the query plan, overall - execution statistics, operator level execution - statistics along with the results. This has a - performance overhead compared to the other - modes. 
It isn't recommended to use this mode for - production traffic. - WITH_STATS (3): - This mode returns the overall (but not - operator-level) execution statistics along with - the results. - WITH_PLAN_AND_STATS (4): - This mode returns the query plan, overall - (but not operator-level) execution statistics - along with the results. - """ - NORMAL = 0 - PLAN = 1 - PROFILE = 2 - WITH_STATS = 3 - WITH_PLAN_AND_STATS = 4 - - class QueryOptions(proto.Message): - r"""Query optimizer configuration. - - Attributes: - optimizer_version (str): - An option to control the selection of optimizer version. - - This parameter allows individual queries to pick different - query optimizer versions. - - Specifying ``latest`` as a value instructs Cloud Spanner to - use the latest supported query optimizer version. If not - specified, Cloud Spanner uses the optimizer version set at - the database level options. Any other positive integer (from - the list of supported optimizer versions) overrides the - default optimizer version for query execution. - - The list of supported optimizer versions can be queried from - ``SPANNER_SYS.SUPPORTED_OPTIMIZER_VERSIONS``. - - Executing a SQL statement with an invalid optimizer version - fails with an ``INVALID_ARGUMENT`` error. - - See - https://cloud.google.com/spanner/docs/query-optimizer/manage-query-optimizer - for more information on managing the query optimizer. - - The ``optimizer_version`` statement hint has precedence over - this setting. - optimizer_statistics_package (str): - An option to control the selection of optimizer statistics - package. - - This parameter allows individual queries to use a different - query optimizer statistics package. - - Specifying ``latest`` as a value instructs Cloud Spanner to - use the latest generated statistics package. If not - specified, Cloud Spanner uses the statistics package set at - the database level options, or the latest package if the - database option isn't set. - - The statistics package requested by the query has to be - exempt from garbage collection. This can be achieved with - the following DDL statement: - - .. code:: sql - - ALTER STATISTICS SET OPTIONS (allow_gc=false) - - The list of available statistics packages can be queried - from ``INFORMATION_SCHEMA.SPANNER_STATISTICS``. - - Executing a SQL statement with an invalid optimizer - statistics package or with a statistics package that allows - garbage collection fails with an ``INVALID_ARGUMENT`` error. 
- """ - - optimizer_version: str = proto.Field( - proto.STRING, - number=1, - ) - optimizer_statistics_package: str = proto.Field( - proto.STRING, - number=2, - ) - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - sql: str = proto.Field( - proto.STRING, - number=3, - ) - params: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=4, - message=struct_pb2.Struct, - ) - param_types: MutableMapping[str, gs_type.Type] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=5, - message=gs_type.Type, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=6, - ) - query_mode: QueryMode = proto.Field( - proto.ENUM, - number=7, - enum=QueryMode, - ) - partition_token: bytes = proto.Field( - proto.BYTES, - number=8, - ) - seqno: int = proto.Field( - proto.INT64, - number=9, - ) - query_options: QueryOptions = proto.Field( - proto.MESSAGE, - number=10, - message=QueryOptions, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=11, - message='RequestOptions', - ) - directed_read_options: 'DirectedReadOptions' = proto.Field( - proto.MESSAGE, - number=15, - message='DirectedReadOptions', - ) - data_boost_enabled: bool = proto.Field( - proto.BOOL, - number=16, - ) - last_statement: bool = proto.Field( - proto.BOOL, - number=17, - ) - - -class ExecuteBatchDmlRequest(proto.Message): - r"""The request for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - - Attributes: - session (str): - Required. The session in which the DML - statements should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - Required. The transaction to use. Must be a - read-write transaction. - To protect against replays, single-use - transactions are not supported. The caller must - either supply an existing transaction ID or - begin a new transaction. - statements (MutableSequence[google.cloud.spanner_v1.types.ExecuteBatchDmlRequest.Statement]): - Required. The list of statements to execute in this batch. - Statements are executed serially, such that the effects of - statement ``i`` are visible to statement ``i+1``. Each - statement must be a DML statement. Execution stops at the - first failed statement; the remaining statements are not - executed. - - Callers must provide at least one statement. - seqno (int): - Required. A per-transaction sequence number - used to identify this request. This field makes - each request idempotent such that if the request - is received multiple times, at most one - succeeds. - - The sequence number must be monotonically - increasing within the transaction. If a request - arrives for the first time with an out-of-order - sequence number, the transaction might be - aborted. Replays of previously handled requests - yield the same response as the first execution. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - last_statements (bool): - Optional. If set to ``true``, this request marks the end of - the transaction. After these statements execute, you must - commit or abort the transaction. Attempts to execute any - other requests against this transaction (including reads and - queries) are rejected. - - Setting this option might cause some error reporting to be - deferred until commit time (for example, validation of - unique constraints). 
Given this, successful execution of - statements shouldn't be assumed until a subsequent - ``Commit`` call completes successfully. - """ - - class Statement(proto.Message): - r"""A single DML statement. - - Attributes: - sql (str): - Required. The DML string. - params (google.protobuf.struct_pb2.Struct): - Parameter names and values that bind to placeholders in the - DML string. - - A parameter placeholder consists of the ``@`` character - followed by the parameter name (for example, - ``@firstName``). Parameter names can contain letters, - numbers, and underscores. - - Parameters can appear anywhere that a literal value is - expected. The same parameter name can be used more than - once, for example: - - ``"WHERE id > @msg_id AND id < @msg_id + 100"`` - - It's an error to execute a SQL statement with unbound - parameters. - param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]): - It isn't always possible for Cloud Spanner to infer the - right SQL type from a JSON value. For example, values of - type ``BYTES`` and values of type ``STRING`` both appear in - [params][google.spanner.v1.ExecuteBatchDmlRequest.Statement.params] - as JSON strings. - - In these cases, ``param_types`` can be used to specify the - exact SQL type for some or all of the SQL statement - parameters. See the definition of - [Type][google.spanner.v1.Type] for more information about - SQL types. - """ - - sql: str = proto.Field( - proto.STRING, - number=1, - ) - params: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - param_types: MutableMapping[str, gs_type.Type] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message=gs_type.Type, - ) - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - statements: MutableSequence[Statement] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Statement, - ) - seqno: int = proto.Field( - proto.INT64, - number=4, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=5, - message='RequestOptions', - ) - last_statements: bool = proto.Field( - proto.BOOL, - number=6, - ) - - -class ExecuteBatchDmlResponse(proto.Message): - r"""The response for - [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. - Contains a list of [ResultSet][google.spanner.v1.ResultSet] - messages, one for each DML statement that has successfully executed, - in the same order as the statements in the request. If a statement - fails, the status in the response body identifies the cause of the - failure. - - To check for DML statements that failed, use the following approach: - - 1. Check the status in the response message. The - [google.rpc.Code][google.rpc.Code] enum value ``OK`` indicates - that all statements were executed successfully. - 2. If the status was not ``OK``, check the number of result sets in - the response. If the response contains ``N`` - [ResultSet][google.spanner.v1.ResultSet] messages, then statement - ``N+1`` in the request failed. - - Example 1: - - - Request: 5 DML statements, all executed successfully. - - Response: 5 [ResultSet][google.spanner.v1.ResultSet] messages, - with the status ``OK``. - - Example 2: - - - Request: 5 DML statements. The third statement has a syntax error. - - Response: 2 [ResultSet][google.spanner.v1.ResultSet] messages, and - a syntax error (``INVALID_ARGUMENT``) status. 
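# [Editor's sketch, not part of the generated file] The failure-check recipe
# described above: a non-OK status with N result sets means statements 1..N
# succeeded and statement N+1 failed.
from typing import Optional

def first_failed_statement(response) -> Optional[int]:
    if response.status.code == 0:  # google.rpc.Code.OK
        return None  # every statement executed successfully
    return len(response.result_sets)  # zero-based index of the failed one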
The number of - [ResultSet][google.spanner.v1.ResultSet] messages indicates that - the third statement failed, and the fourth and fifth statements - were not executed. - - Attributes: - result_sets (MutableSequence[google.cloud.spanner_v1.types.ResultSet]): - One [ResultSet][google.spanner.v1.ResultSet] for each - statement in the request that ran successfully, in the same - order as the statements in the request. Each - [ResultSet][google.spanner.v1.ResultSet] does not contain - any rows. The - [ResultSetStats][google.spanner.v1.ResultSetStats] in each - [ResultSet][google.spanner.v1.ResultSet] contains the number - of rows modified by the statement. - - Only the first [ResultSet][google.spanner.v1.ResultSet] in - the response contains valid - [ResultSetMetadata][google.spanner.v1.ResultSetMetadata]. - status (google.rpc.status_pb2.Status): - If all DML statements are executed successfully, the status - is ``OK``. Otherwise, the error status of the first failed - statement. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. A precommit token is included if the read-write - transaction is on a multiplexed session. Pass the precommit - token with the highest sequence number from this transaction - attempt to the - [Commit][google.spanner.v1.Spanner.Commit] request for this - transaction. - """ - - result_sets: MutableSequence[result_set.ResultSet] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=result_set.ResultSet, - ) - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=3, - message=gs_transaction.MultiplexedSessionPrecommitToken, - ) - - -class PartitionOptions(proto.Message): - r"""Options for a ``PartitionQueryRequest`` and - ``PartitionReadRequest``. - - Attributes: - partition_size_bytes (int): - **Note:** This hint is currently ignored by - ``PartitionQuery`` and ``PartitionRead`` requests. - - The desired data size for each partition generated. The - default for this option is currently 1 GiB. This is only a - hint. The actual size of each partition can be smaller or - larger than this size request. - max_partitions (int): - **Note:** This hint is currently ignored by - ``PartitionQuery`` and ``PartitionRead`` requests. - - The desired maximum number of partitions to return. For - example, this might be set to the number of workers - available. The default for this option is currently 10,000. - The maximum value is currently 200,000. This is only a hint. - The actual number of partitions returned can be smaller or - larger than this maximum count request. - """ - - partition_size_bytes: int = proto.Field( - proto.INT64, - number=1, - ) - max_partitions: int = proto.Field( - proto.INT64, - number=2, - ) - - -class PartitionQueryRequest(proto.Message): - r"""The request for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] - - Attributes: - session (str): - Required. The session used to create the - partitions. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - Read-only snapshot transactions are - supported; read-write and single-use - transactions are not. - sql (str): - Required. The query request to generate partitions for. The - request fails if the query isn't root partitionable. For a - query to be root partitionable, it needs to satisfy a few - conditions.
For example, if the query execution plan - contains a distributed union operator, then it must be the - first operator in the plan. For more information about other - conditions, see `Read data in - parallel <https://cloud.google.com/spanner/docs/reads#read_data_in_parallel>`__. - - The query request must not contain DML commands, such as - ``INSERT``, ``UPDATE``, or ``DELETE``. Use - [``ExecuteStreamingSql``][google.spanner.v1.Spanner.ExecuteStreamingSql] - with a ``PartitionedDml`` transaction for large, - partition-friendly DML operations. - params (google.protobuf.struct_pb2.Struct): - Parameter names and values that bind to placeholders in the - SQL string. - - A parameter placeholder consists of the ``@`` character - followed by the parameter name (for example, - ``@firstName``). Parameter names can contain letters, - numbers, and underscores. - - Parameters can appear anywhere that a literal value is - expected. The same parameter name can be used more than - once, for example: - - ``"WHERE id > @msg_id AND id < @msg_id + 100"`` - - It's an error to execute a SQL statement with unbound - parameters. - param_types (MutableMapping[str, google.cloud.spanner_v1.types.Type]): - It isn't always possible for Cloud Spanner to infer the - right SQL type from a JSON value. For example, values of - type ``BYTES`` and values of type ``STRING`` both appear in - [params][google.spanner.v1.PartitionQueryRequest.params] as - JSON strings. - - In these cases, ``param_types`` can be used to specify the - exact SQL type for some or all of the SQL query parameters. - See the definition of [Type][google.spanner.v1.Type] for - more information about SQL types. - partition_options (google.cloud.spanner_v1.types.PartitionOptions): - Additional options that affect how many - partitions are created. - """ - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - sql: str = proto.Field( - proto.STRING, - number=3, - ) - params: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=4, - message=struct_pb2.Struct, - ) - param_types: MutableMapping[str, gs_type.Type] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=5, - message=gs_type.Type, - ) - partition_options: 'PartitionOptions' = proto.Field( - proto.MESSAGE, - number=6, - message='PartitionOptions', - ) - - -class PartitionReadRequest(proto.Message): - r"""The request for - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - Attributes: - session (str): - Required. The session used to create the - partitions. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - Read-only snapshot transactions are - supported; read-write and single-use - transactions are not. - table (str): - Required. The name of the table in the - database to be read. - index (str): - If non-empty, the name of an index on - [table][google.spanner.v1.PartitionReadRequest.table]. This - index is used instead of the table primary key when - interpreting - [key_set][google.spanner.v1.PartitionReadRequest.key_set] - and sorting result rows. See - [key_set][google.spanner.v1.PartitionReadRequest.key_set] - for further information. - columns (MutableSequence[str]): - The columns of - [table][google.spanner.v1.PartitionReadRequest.table] to be - returned for each row matching this request. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. ``key_set`` identifies the rows to be yielded.
- ``key_set`` names the primary keys of the rows in - [table][google.spanner.v1.PartitionReadRequest.table] to be - yielded, unless - [index][google.spanner.v1.PartitionReadRequest.index] is - present. If - [index][google.spanner.v1.PartitionReadRequest.index] is - present, then - [key_set][google.spanner.v1.PartitionReadRequest.key_set] - instead names index keys in - [index][google.spanner.v1.PartitionReadRequest.index]. - - It isn't an error for the ``key_set`` to name rows that - don't exist in the database. Read yields nothing for - nonexistent rows. - partition_options (google.cloud.spanner_v1.types.PartitionOptions): - Additional options that affect how many - partitions are created. - """ - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - table: str = proto.Field( - proto.STRING, - number=3, - ) - index: str = proto.Field( - proto.STRING, - number=4, - ) - columns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - key_set: keys.KeySet = proto.Field( - proto.MESSAGE, - number=6, - message=keys.KeySet, - ) - partition_options: 'PartitionOptions' = proto.Field( - proto.MESSAGE, - number=9, - message='PartitionOptions', - ) - - -class Partition(proto.Message): - r"""Information returned for each partition returned in a - PartitionResponse. - - Attributes: - partition_token (bytes): - This token can be passed to ``Read``, ``StreamingRead``, - ``ExecuteSql``, or ``ExecuteStreamingSql`` requests to - restrict the results to those identified by this partition - token. - """ - - partition_token: bytes = proto.Field( - proto.BYTES, - number=1, - ) - - -class PartitionResponse(proto.Message): - r"""The response for - [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] or - [PartitionRead][google.spanner.v1.Spanner.PartitionRead] - - Attributes: - partitions (MutableSequence[google.cloud.spanner_v1.types.Partition]): - Partitions created by this request. - transaction (google.cloud.spanner_v1.types.Transaction): - Transaction created by this request. - """ - - partitions: MutableSequence['Partition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Partition', - ) - transaction: gs_transaction.Transaction = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.Transaction, - ) - - -class ReadRequest(proto.Message): - r"""The request for [Read][google.spanner.v1.Spanner.Read] and - [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. - - Attributes: - session (str): - Required. The session in which the read - should be performed. - transaction (google.cloud.spanner_v1.types.TransactionSelector): - The transaction to use. If none is provided, - the default is a temporary read-only transaction - with strong concurrency. - table (str): - Required. The name of the table in the - database to be read. - index (str): - If non-empty, the name of an index on - [table][google.spanner.v1.ReadRequest.table]. This index is - used instead of the table primary key when interpreting - [key_set][google.spanner.v1.ReadRequest.key_set] and sorting - result rows. See - [key_set][google.spanner.v1.ReadRequest.key_set] for further - information. - columns (MutableSequence[str]): - Required. The columns of - [table][google.spanner.v1.ReadRequest.table] to be returned - for each row matching this request. - key_set (google.cloud.spanner_v1.types.KeySet): - Required. 
``key_set`` identifies the rows to be yielded. - ``key_set`` names the primary keys of the rows in - [table][google.spanner.v1.ReadRequest.table] to be yielded, - unless [index][google.spanner.v1.ReadRequest.index] is - present. If [index][google.spanner.v1.ReadRequest.index] is - present, then - [key_set][google.spanner.v1.ReadRequest.key_set] instead - names index keys in - [index][google.spanner.v1.ReadRequest.index]. - - If the - [partition_token][google.spanner.v1.ReadRequest.partition_token] - field is empty, rows are yielded in table primary key order - (if [index][google.spanner.v1.ReadRequest.index] is empty) - or index key order (if - [index][google.spanner.v1.ReadRequest.index] is non-empty). - If the - [partition_token][google.spanner.v1.ReadRequest.partition_token] - field isn't empty, rows are yielded in an unspecified order. - - It isn't an error for the ``key_set`` to name rows that - don't exist in the database. Read yields nothing for - nonexistent rows. - limit (int): - If greater than zero, only the first ``limit`` rows are - yielded. If ``limit`` is zero, the default is no limit. A - limit can't be specified if ``partition_token`` is set. - resume_token (bytes): - If this request is resuming a previously interrupted read, - ``resume_token`` should be copied from the last - [PartialResultSet][google.spanner.v1.PartialResultSet] - yielded before the interruption. Doing this enables the new - read to resume where the last read left off. The rest of the - request parameters must exactly match the request that - yielded this token. - partition_token (bytes): - If present, results are restricted to the specified - partition previously created using ``PartitionRead``. There - must be an exact match for the values of fields common to - this message and the PartitionReadRequest message used to - create this partition_token. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - directed_read_options (google.cloud.spanner_v1.types.DirectedReadOptions): - Directed read options for this request. - data_boost_enabled (bool): - If this is for a partitioned read and this field is set to - ``true``, the request is executed with Spanner Data Boost - independent compute resources. - - If the field is set to ``true`` but the request doesn't set - ``partition_token``, the API returns an ``INVALID_ARGUMENT`` - error. - order_by (google.cloud.spanner_v1.types.ReadRequest.OrderBy): - Optional. Order for the returned rows. - - By default, Spanner returns result rows in primary key order - except for PartitionRead requests. For applications that - don't require rows to be returned in primary key - (``ORDER_BY_PRIMARY_KEY``) order, setting - ``ORDER_BY_NO_ORDER`` option allows Spanner to optimize row - retrieval, resulting in lower latencies in certain cases - (for example, bulk point lookups). - lock_hint (google.cloud.spanner_v1.types.ReadRequest.LockHint): - Optional. Lock Hint for the request, it can - only be used with read-write transactions. - """ - class OrderBy(proto.Enum): - r"""An option to control the order in which rows are returned - from a read. - - Values: - ORDER_BY_UNSPECIFIED (0): - Default value. - - ``ORDER_BY_UNSPECIFIED`` is equivalent to - ``ORDER_BY_PRIMARY_KEY``. - ORDER_BY_PRIMARY_KEY (1): - Read rows are returned in primary key order. - - In the event that this option is used in conjunction with - the ``partition_token`` field, the API returns an - ``INVALID_ARGUMENT`` error. 
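A sketch of the partitioned-read path these fields describe, assuming a hypothetical session name and a ``partition_token`` previously returned by ``PartitionRead``; ``limit`` is deliberately absent because it can't be combined with a partition token::

    from google.cloud import spanner_v1

    session = "projects/p/instances/i/databases/d/sessions/s"  # hypothetical
    token = b"\x0a\x10..."  # hypothetical Partition.partition_token bytes

    request = spanner_v1.ReadRequest(
        session=session,
        table="Singers",
        columns=["SingerId", "FirstName"],
        # Must exactly match the key_set used in the PartitionReadRequest.
        key_set=spanner_v1.KeySet(all_=True),
        partition_token=token,
        # Data Boost is only valid together with partition_token;
        # otherwise the API returns INVALID_ARGUMENT.
        data_boost_enabled=True,
    )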
- ORDER_BY_NO_ORDER (2): - Read rows are returned in any order. - """ - ORDER_BY_UNSPECIFIED = 0 - ORDER_BY_PRIMARY_KEY = 1 - ORDER_BY_NO_ORDER = 2 - - class LockHint(proto.Enum): - r"""A lock hint mechanism for reads done within a transaction. - - Values: - LOCK_HINT_UNSPECIFIED (0): - Default value. - - ``LOCK_HINT_UNSPECIFIED`` is equivalent to - ``LOCK_HINT_SHARED``. - LOCK_HINT_SHARED (1): - Acquire shared locks. - - By default when you perform a read as part of a read-write - transaction, Spanner acquires shared read locks, which - allows other reads to still access the data until your - transaction is ready to commit. When your transaction is - committing and writes are being applied, the transaction - attempts to upgrade to an exclusive lock for any data you - are writing. For more information about locks, see `Lock - modes `__. - LOCK_HINT_EXCLUSIVE (2): - Acquire exclusive locks. - - Requesting exclusive locks is beneficial if you observe high - write contention, which means you notice that multiple - transactions are concurrently trying to read and write to - the same data, resulting in a large number of aborts. This - problem occurs when two transactions initially acquire - shared locks and then both try to upgrade to exclusive locks - at the same time. In this situation both transactions are - waiting for the other to give up their lock, resulting in a - deadlocked situation. Spanner is able to detect this - occurring and force one of the transactions to abort. - However, this is a slow and expensive operation and results - in lower performance. In this case it makes sense to acquire - exclusive locks at the start of the transaction because then - when multiple transactions try to act on the same data, they - automatically get serialized. Each transaction waits its - turn to acquire the lock and avoids getting into deadlock - situations. - - Because the exclusive lock hint is just a hint, it shouldn't - be considered equivalent to a mutex. In other words, you - shouldn't use Spanner exclusive locks as a mutual exclusion - mechanism for the execution of code outside of Spanner. - - **Note:** Request exclusive locks judiciously because they - block others from reading that data for the entire - transaction, rather than just when the writes are being - performed. Unless you observe high write contention, you - should use the default of shared read locks so you don't - prematurely block other clients from reading the data that - you're writing to. 
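The contention trade-off above, expressed as request fields. A minimal sketch with hypothetical session, table, and key names; ``lock_hint`` is only honored inside a read-write transaction, and proto-plus coerces the nested key list into a ``ListValue``::

    from google.cloud import spanner_v1

    request = spanner_v1.ReadRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # hypothetical
        transaction=spanner_v1.TransactionSelector(
            begin=spanner_v1.TransactionOptions(
                read_write=spanner_v1.TransactionOptions.ReadWrite(),
            ),
        ),
        table="Inventory",
        columns=["ItemId", "Count"],
        key_set=spanner_v1.KeySet(keys=[["item-1"]]),
        # Serialize writers up front instead of aborting at lock-upgrade
        # time; keep the shared default unless contention is high.
        lock_hint=spanner_v1.ReadRequest.LockHint.LOCK_HINT_EXCLUSIVE,
    )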
- """ - LOCK_HINT_UNSPECIFIED = 0 - LOCK_HINT_SHARED = 1 - LOCK_HINT_EXCLUSIVE = 2 - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction: gs_transaction.TransactionSelector = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionSelector, - ) - table: str = proto.Field( - proto.STRING, - number=3, - ) - index: str = proto.Field( - proto.STRING, - number=4, - ) - columns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - key_set: keys.KeySet = proto.Field( - proto.MESSAGE, - number=6, - message=keys.KeySet, - ) - limit: int = proto.Field( - proto.INT64, - number=8, - ) - resume_token: bytes = proto.Field( - proto.BYTES, - number=9, - ) - partition_token: bytes = proto.Field( - proto.BYTES, - number=10, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=11, - message='RequestOptions', - ) - directed_read_options: 'DirectedReadOptions' = proto.Field( - proto.MESSAGE, - number=14, - message='DirectedReadOptions', - ) - data_boost_enabled: bool = proto.Field( - proto.BOOL, - number=15, - ) - order_by: OrderBy = proto.Field( - proto.ENUM, - number=16, - enum=OrderBy, - ) - lock_hint: LockHint = proto.Field( - proto.ENUM, - number=17, - enum=LockHint, - ) - - -class BeginTransactionRequest(proto.Message): - r"""The request for - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. - - Attributes: - session (str): - Required. The session in which the - transaction runs. - options (google.cloud.spanner_v1.types.TransactionOptions): - Required. Options for the new transaction. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. Priority is ignored for - this request. Setting the priority in this - ``request_options`` struct doesn't do anything. To set the - priority for a transaction, set it on the reads and writes - that are part of this transaction instead. - mutation_key (google.cloud.spanner_v1.types.Mutation): - Optional. Required for read-write - transactions on a multiplexed session that - commit mutations but don't perform any reads or - queries. You must randomly select one of the - mutations from the mutation set and send it as a - part of this request. - """ - - session: str = proto.Field( - proto.STRING, - number=1, - ) - options: gs_transaction.TransactionOptions = proto.Field( - proto.MESSAGE, - number=2, - message=gs_transaction.TransactionOptions, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='RequestOptions', - ) - mutation_key: mutation.Mutation = proto.Field( - proto.MESSAGE, - number=4, - message=mutation.Mutation, - ) - - -class CommitRequest(proto.Message): - r"""The request for [Commit][google.spanner.v1.Spanner.Commit]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - session (str): - Required. The session in which the - transaction to be committed is running. - transaction_id (bytes): - Commit a previously-started transaction. - - This field is a member of `oneof`_ ``transaction``. - single_use_transaction (google.cloud.spanner_v1.types.TransactionOptions): - Execute mutations in a temporary transaction. 
Note that - unlike commit of a previously-started transaction, commit - with a temporary transaction is non-idempotent. That is, if - the ``CommitRequest`` is sent to Cloud Spanner more than - once (for instance, due to retries in the application, or in - the transport library), it's possible that the mutations are - executed more than once. If this is undesirable, use - [BeginTransaction][google.spanner.v1.Spanner.BeginTransaction] - and [Commit][google.spanner.v1.Spanner.Commit] instead. - - This field is a member of `oneof`_ ``transaction``. - mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): - The mutations to be executed when this - transaction commits. All mutations are applied - atomically, in the order they appear in this - list. - return_commit_stats (bool): - If ``true``, then statistics related to the transaction is - included in the - [CommitResponse][google.spanner.v1.CommitResponse.commit_stats]. - Default value is ``false``. - max_commit_delay (google.protobuf.duration_pb2.Duration): - Optional. The amount of latency this request - is configured to incur in order to improve - throughput. If this field isn't set, Spanner - assumes requests are relatively latency - sensitive and automatically determines an - appropriate delay time. You can specify a commit - delay value between 0 and 500 ms. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - Optional. If the read-write transaction was executed on a - multiplexed session, then you must include the precommit - token with the highest sequence number received in this - transaction attempt. Failing to do so results in a - ``FailedPrecondition`` error. - """ - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction_id: bytes = proto.Field( - proto.BYTES, - number=2, - oneof='transaction', - ) - single_use_transaction: gs_transaction.TransactionOptions = proto.Field( - proto.MESSAGE, - number=3, - oneof='transaction', - message=gs_transaction.TransactionOptions, - ) - mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=mutation.Mutation, - ) - return_commit_stats: bool = proto.Field( - proto.BOOL, - number=5, - ) - max_commit_delay: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=6, - message='RequestOptions', - ) - precommit_token: gs_transaction.MultiplexedSessionPrecommitToken = proto.Field( - proto.MESSAGE, - number=9, - message=gs_transaction.MultiplexedSessionPrecommitToken, - ) - - -class RollbackRequest(proto.Message): - r"""The request for [Rollback][google.spanner.v1.Spanner.Rollback]. - - Attributes: - session (str): - Required. The session in which the - transaction to roll back is running. - transaction_id (bytes): - Required. The transaction to roll back. - """ - - session: str = proto.Field( - proto.STRING, - number=1, - ) - transaction_id: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class BatchWriteRequest(proto.Message): - r"""The request for [BatchWrite][google.spanner.v1.Spanner.BatchWrite]. - - Attributes: - session (str): - Required. The session in which the batch - request is to be run. - request_options (google.cloud.spanner_v1.types.RequestOptions): - Common options for this request. 
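The non-idempotence warning for ``single_use_transaction`` matters whenever retries are possible. A sketch of a one-shot commit with a bounded commit delay, using hypothetical names::

    from google.cloud import spanner_v1
    from google.protobuf import duration_pb2

    request = spanner_v1.CommitRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # hypothetical
        # Oneof member: a temporary transaction instead of transaction_id.
        # A retried send may apply the mutations twice; use
        # BeginTransaction + Commit when that is unacceptable.
        single_use_transaction=spanner_v1.TransactionOptions(
            read_write=spanner_v1.TransactionOptions.ReadWrite(),
        ),
        mutations=[
            spanner_v1.Mutation(
                update=spanner_v1.Mutation.Write(
                    table="Singers",
                    columns=["SingerId", "FirstName"],
                    values=[["1", "Marc"]],
                ),
            ),
        ],
        # Trade up to 100 ms of latency for throughput (0-500 ms allowed).
        max_commit_delay=duration_pb2.Duration(nanos=100_000_000),
        return_commit_stats=True,
    )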
- mutation_groups (MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]): - Required. The groups of mutations to be - applied. - exclude_txn_from_change_streams (bool): - Optional. If you don't set the - ``exclude_txn_from_change_streams`` option or if it's set to - ``false``, then any change streams monitoring columns - modified by transactions will capture the updates made - within that transaction. - """ - - class MutationGroup(proto.Message): - r"""A group of mutations to be committed together. Related - mutations should be placed in a group. For example, two - mutations inserting rows with the same primary key prefix in - both parent and child tables are related. - - Attributes: - mutations (MutableSequence[google.cloud.spanner_v1.types.Mutation]): - Required. The mutations in this group. - """ - - mutations: MutableSequence[mutation.Mutation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=mutation.Mutation, - ) - - session: str = proto.Field( - proto.STRING, - number=1, - ) - request_options: 'RequestOptions' = proto.Field( - proto.MESSAGE, - number=3, - message='RequestOptions', - ) - mutation_groups: MutableSequence[MutationGroup] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=MutationGroup, - ) - exclude_txn_from_change_streams: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class BatchWriteResponse(proto.Message): - r"""The result of applying a batch of mutations. - - Attributes: - indexes (MutableSequence[int]): - The mutation groups applied in this batch. The values index - into the ``mutation_groups`` field in the corresponding - ``BatchWriteRequest``. - status (google.rpc.status_pb2.Status): - An ``OK`` status indicates success. Any other status - indicates a failure. - commit_timestamp (google.protobuf.timestamp_pb2.Timestamp): - The commit timestamp of the transaction that applied this - batch. Present if ``status`` is ``OK``, absent otherwise. - """ - - indexes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=1, - ) - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - commit_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py deleted file mode 100644 index 5e13285565..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/transaction.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
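Tying ``MutationGroup`` and ``BatchWriteResponse`` together: groups are applied independently, so callers match results back through ``indexes``. A sketch with hypothetical names; the client call at the end is indicative only::

    from google.cloud import spanner_v1

    def group_for(singer_id: str, name: str) -> spanner_v1.BatchWriteRequest.MutationGroup:
        # Related mutations (e.g., parent and child rows) share one group.
        return spanner_v1.BatchWriteRequest.MutationGroup(
            mutations=[
                spanner_v1.Mutation(
                    insert=spanner_v1.Mutation.Write(
                        table="Singers",
                        columns=["SingerId", "Name"],
                        values=[[singer_id, name]],
                    ),
                ),
            ],
        )

    request = spanner_v1.BatchWriteRequest(
        session="projects/p/instances/i/databases/d/sessions/s",  # hypothetical
        mutation_groups=[group_for("1", "Marc"), group_for("2", "Catalina")],
    )

    # Each streamed response covers a batch of groups; `indexes` points
    # back into request.mutation_groups, and status.code == 0 means OK.
    # for response in client.batch_write(request=request):
    #     print(list(response.indexes), response.status.code)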
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'TransactionOptions', - 'Transaction', - 'TransactionSelector', - 'MultiplexedSessionPrecommitToken', - }, -) - - -class TransactionOptions(proto.Message): - r"""Options to use for transactions. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - read_write (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite): - Transaction may write. - - Authorization to begin a read-write transaction requires - ``spanner.databases.beginOrRollbackReadWriteTransaction`` - permission on the ``session`` resource. - - This field is a member of `oneof`_ ``mode``. - partitioned_dml (google.cloud.spanner_v1.types.TransactionOptions.PartitionedDml): - Partitioned DML transaction. - - Authorization to begin a Partitioned DML transaction - requires - ``spanner.databases.beginPartitionedDmlTransaction`` - permission on the ``session`` resource. - - This field is a member of `oneof`_ ``mode``. - read_only (google.cloud.spanner_v1.types.TransactionOptions.ReadOnly): - Transaction does not write. - - Authorization to begin a read-only transaction requires - ``spanner.databases.beginReadOnlyTransaction`` permission on - the ``session`` resource. - - This field is a member of `oneof`_ ``mode``. - exclude_txn_from_change_streams (bool): - When ``exclude_txn_from_change_streams`` is set to ``true``, - it prevents read or write transactions from being tracked in - change streams. - - - If the DDL option ``allow_txn_exclusion`` is set to - ``true``, then the updates made within this transaction - aren't recorded in the change stream. - - - If you don't set the DDL option ``allow_txn_exclusion`` or - if it's set to ``false``, then the updates made within - this transaction are recorded in the change stream. - - When ``exclude_txn_from_change_streams`` is set to ``false`` - or not set, modifications from this transaction are recorded - in all change streams that are tracking columns modified by - these transactions. - - The ``exclude_txn_from_change_streams`` option can only be - specified for read-write or partitioned DML transactions, - otherwise the API returns an ``INVALID_ARGUMENT`` error. - isolation_level (google.cloud.spanner_v1.types.TransactionOptions.IsolationLevel): - Isolation level for the transaction. - """ - class IsolationLevel(proto.Enum): - r"""``IsolationLevel`` is used when setting ``isolation_level`` for a - transaction. - - Values: - ISOLATION_LEVEL_UNSPECIFIED (0): - Default value. - - If the value is not specified, the ``SERIALIZABLE`` - isolation level is used. - SERIALIZABLE (1): - All transactions appear as if they executed in a serial - order, even if some of the reads, writes, and other - operations of distinct transactions actually occurred in - parallel. Spanner assigns commit timestamps that reflect the - order of committed transactions to implement this property. - Spanner offers a stronger guarantee than serializability - called external consistency. 
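The mode oneof and the change-stream exclusion flag compose like this (a sketch; whether excluded updates are actually dropped depends on each change stream's ``allow_txn_exclusion`` DDL option, not on this message)::

    from google.cloud import spanner_v1

    # Partitioned DML whose updates are hidden from change streams that
    # were created with allow_txn_exclusion=true.
    backfill = spanner_v1.TransactionOptions(
        partitioned_dml=spanner_v1.TransactionOptions.PartitionedDml(),
        exclude_txn_from_change_streams=True,
    )

    # Read-write under snapshot isolation: only write-write conflicts
    # are detected, and locking defaults to OPTIMISTIC.
    snapshot_rw = spanner_v1.TransactionOptions(
        read_write=spanner_v1.TransactionOptions.ReadWrite(),
        isolation_level=spanner_v1.TransactionOptions.IsolationLevel.REPEATABLE_READ,
    )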
For more information, see - `TrueTime and external - consistency `__. - REPEATABLE_READ (2): - All reads performed during the transaction observe a - consistent snapshot of the database, and the transaction is - only successfully committed in the absence of conflicts - between its updates and any concurrent updates that have - occurred since that snapshot. Consequently, in contrast to - ``SERIALIZABLE`` transactions, only write-write conflicts - are detected in snapshot transactions. - - This isolation level does not support Read-only and - Partitioned DML transactions. - - When ``REPEATABLE_READ`` is specified on a read-write - transaction, the locking semantics default to - ``OPTIMISTIC``. - """ - ISOLATION_LEVEL_UNSPECIFIED = 0 - SERIALIZABLE = 1 - REPEATABLE_READ = 2 - - class ReadWrite(proto.Message): - r"""Message type to initiate a read-write transaction. Currently - this transaction type has no options. - - Attributes: - read_lock_mode (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite.ReadLockMode): - Read lock mode for the transaction. - multiplexed_session_previous_transaction_id (bytes): - Optional. Clients should pass the transaction - ID of the previous transaction attempt that was - aborted if this transaction is being executed on - a multiplexed session. - """ - class ReadLockMode(proto.Enum): - r"""``ReadLockMode`` is used to set the read lock mode for read-write - transactions. - - Values: - READ_LOCK_MODE_UNSPECIFIED (0): - Default value. - - - If isolation level is - [REPEATABLE_READ][google.spanner.v1.TransactionOptions.IsolationLevel.REPEATABLE_READ], - then it is an error to specify ``read_lock_mode``. Locking - semantics default to ``OPTIMISTIC``. No validation checks - are done for reads, except to validate that the data that - was served at the snapshot time is unchanged at commit - time in the following cases: - - 1. reads done as part of queries that use - ``SELECT FOR UPDATE`` - 2. reads done as part of statements with a - ``LOCK_SCANNED_RANGES`` hint - 3. reads done as part of DML statements - - - At all other isolation levels, if ``read_lock_mode`` is - the default value, then pessimistic read locks are used. - PESSIMISTIC (1): - Pessimistic lock mode. - - Read locks are acquired immediately on read. Semantics - described only applies to - [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE] - isolation. - OPTIMISTIC (2): - Optimistic lock mode. - - Locks for reads within the transaction are not acquired on - read. Instead the locks are acquired on a commit to validate - that read/queried data has not changed since the transaction - started. Semantics described only applies to - [SERIALIZABLE][google.spanner.v1.TransactionOptions.IsolationLevel.SERIALIZABLE] - isolation. - """ - READ_LOCK_MODE_UNSPECIFIED = 0 - PESSIMISTIC = 1 - OPTIMISTIC = 2 - - read_lock_mode: 'TransactionOptions.ReadWrite.ReadLockMode' = proto.Field( - proto.ENUM, - number=1, - enum='TransactionOptions.ReadWrite.ReadLockMode', - ) - multiplexed_session_previous_transaction_id: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - class PartitionedDml(proto.Message): - r"""Message type to initiate a Partitioned DML transaction. - """ - - class ReadOnly(proto.Message): - r"""Message type to initiate a read-only transaction. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - strong (bool): - Read at a timestamp where all previously - committed transactions are visible. - - This field is a member of `oneof`_ ``timestamp_bound``. - min_read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Executes all reads at a timestamp >= ``min_read_timestamp``. - - This is useful for requesting fresher data than some - previous read, or data that is fresh enough to observe the - effects of some previously committed transaction whose - timestamp is known. - - Note that this option can only be used in single-use - transactions. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - - This field is a member of `oneof`_ ``timestamp_bound``. - max_staleness (google.protobuf.duration_pb2.Duration): - Read data at a timestamp >= ``NOW - max_staleness`` seconds. - Guarantees that all writes that have committed more than the - specified number of seconds ago are visible. Because Cloud - Spanner chooses the exact timestamp, this mode works even if - the client's local clock is substantially skewed from Cloud - Spanner commit timestamps. - - Useful for reading the freshest data available at a nearby - replica, while bounding the possible staleness if the local - replica has fallen behind. - - Note that this option can only be used in single-use - transactions. - - This field is a member of `oneof`_ ``timestamp_bound``. - read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - Executes all reads at the given timestamp. Unlike other - modes, reads at a specific timestamp are repeatable; the - same read at the same timestamp always returns the same - data. If the timestamp is in the future, the read is blocked - until the specified timestamp, modulo the read's deadline. - - Useful for large scale consistent reads such as mapreduces, - or for coordinating many reads against a consistent snapshot - of the data. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - - This field is a member of `oneof`_ ``timestamp_bound``. - exact_staleness (google.protobuf.duration_pb2.Duration): - Executes all reads at a timestamp that is - ``exact_staleness`` old. The timestamp is chosen soon after - the read is started. - - Guarantees that all writes that have committed more than the - specified number of seconds ago are visible. Because Cloud - Spanner chooses the exact timestamp, this mode works even if - the client's local clock is substantially skewed from Cloud - Spanner commit timestamps. - - Useful for reading at nearby replicas without the - distributed timestamp negotiation overhead of - ``max_staleness``. - - This field is a member of `oneof`_ ``timestamp_bound``. - return_read_timestamp (bool): - If true, the Cloud Spanner-selected read timestamp is - included in the [Transaction][google.spanner.v1.Transaction] - message that describes the transaction. 
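The five ``timestamp_bound`` members are mutually exclusive; a sketch of three common choices::

    from google.cloud import spanner_v1
    from google.protobuf import duration_pb2

    # Strong: see everything committed before the read started.
    strong = spanner_v1.TransactionOptions.ReadOnly(strong=True)

    # Bounded staleness (single-use only): a nearby replica may serve
    # the read if it is at most 15 seconds behind.
    bounded = spanner_v1.TransactionOptions.ReadOnly(
        max_staleness=duration_pb2.Duration(seconds=15),
    )

    # Exact staleness, echoing the chosen timestamp back in the
    # resulting Transaction message.
    exact = spanner_v1.TransactionOptions.ReadOnly(
        exact_staleness=duration_pb2.Duration(seconds=10),
        return_read_timestamp=True,
    )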
- """ - - strong: bool = proto.Field( - proto.BOOL, - number=1, - oneof='timestamp_bound', - ) - min_read_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof='timestamp_bound', - message=timestamp_pb2.Timestamp, - ) - max_staleness: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - oneof='timestamp_bound', - message=duration_pb2.Duration, - ) - read_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - oneof='timestamp_bound', - message=timestamp_pb2.Timestamp, - ) - exact_staleness: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=5, - oneof='timestamp_bound', - message=duration_pb2.Duration, - ) - return_read_timestamp: bool = proto.Field( - proto.BOOL, - number=6, - ) - - read_write: ReadWrite = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ReadWrite, - ) - partitioned_dml: PartitionedDml = proto.Field( - proto.MESSAGE, - number=3, - oneof='mode', - message=PartitionedDml, - ) - read_only: ReadOnly = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=ReadOnly, - ) - exclude_txn_from_change_streams: bool = proto.Field( - proto.BOOL, - number=5, - ) - isolation_level: IsolationLevel = proto.Field( - proto.ENUM, - number=6, - enum=IsolationLevel, - ) - - -class Transaction(proto.Message): - r"""A transaction. - - Attributes: - id (bytes): - ``id`` may be used to identify the transaction in subsequent - [Read][google.spanner.v1.Spanner.Read], - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], - [Commit][google.spanner.v1.Spanner.Commit], or - [Rollback][google.spanner.v1.Spanner.Rollback] calls. - - Single-use read-only transactions do not have IDs, because - single-use transactions do not support multiple requests. - read_timestamp (google.protobuf.timestamp_pb2.Timestamp): - For snapshot read-only transactions, the read timestamp - chosen for the transaction. Not returned by default: see - [TransactionOptions.ReadOnly.return_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.return_read_timestamp]. - - A timestamp in RFC3339 UTC "Zulu" format, accurate to - nanoseconds. Example: ``"2014-10-02T15:01:23.045123456Z"``. - precommit_token (google.cloud.spanner_v1.types.MultiplexedSessionPrecommitToken): - A precommit token is included in the response of a - BeginTransaction request if the read-write transaction is on - a multiplexed session and a mutation_key was specified in - the - [BeginTransaction][google.spanner.v1.BeginTransactionRequest]. - The precommit token with the highest sequence number from - this transaction attempt should be passed to the - [Commit][google.spanner.v1.Spanner.Commit] request for this - transaction. - """ - - id: bytes = proto.Field( - proto.BYTES, - number=1, - ) - read_timestamp: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - precommit_token: 'MultiplexedSessionPrecommitToken' = proto.Field( - proto.MESSAGE, - number=3, - message='MultiplexedSessionPrecommitToken', - ) - - -class TransactionSelector(proto.Message): - r"""This message is used to select the transaction in which a - [Read][google.spanner.v1.Spanner.Read] or - [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] call runs. - - See [TransactionOptions][google.spanner.v1.TransactionOptions] for - more information about transactions. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - single_use (google.cloud.spanner_v1.types.TransactionOptions): - Execute the read or SQL query in a temporary - transaction. This is the most efficient way to - execute a transaction that consists of a single - SQL query. - - This field is a member of `oneof`_ ``selector``. - id (bytes): - Execute the read or SQL query in a - previously-started transaction. - - This field is a member of `oneof`_ ``selector``. - begin (google.cloud.spanner_v1.types.TransactionOptions): - Begin a new transaction and execute this read or SQL query - in it. The transaction ID of the new transaction is returned - in - [ResultSetMetadata.transaction][google.spanner.v1.ResultSetMetadata.transaction], - which is a [Transaction][google.spanner.v1.Transaction]. - - This field is a member of `oneof`_ ``selector``. - """ - - single_use: 'TransactionOptions' = proto.Field( - proto.MESSAGE, - number=1, - oneof='selector', - message='TransactionOptions', - ) - id: bytes = proto.Field( - proto.BYTES, - number=2, - oneof='selector', - ) - begin: 'TransactionOptions' = proto.Field( - proto.MESSAGE, - number=3, - oneof='selector', - message='TransactionOptions', - ) - - -class MultiplexedSessionPrecommitToken(proto.Message): - r"""When a read-write transaction is executed on a multiplexed session, - this precommit token is sent back to the client as a part of the - [Transaction][google.spanner.v1.Transaction] message in the - [BeginTransaction][google.spanner.v1.BeginTransactionRequest] - response and also as a part of the - [ResultSet][google.spanner.v1.ResultSet] and - [PartialResultSet][google.spanner.v1.PartialResultSet] responses. - - Attributes: - precommit_token (bytes): - Opaque precommit token. - seq_num (int): - An incrementing seq number is generated on - every precommit token that is returned. Clients - should remember the precommit token with the - highest sequence number from the current - transaction attempt. - """ - - precommit_token: bytes = proto.Field( - proto.BYTES, - number=1, - ) - seq_num: int = proto.Field( - proto.INT32, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py b/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py deleted file mode 100644 index 5b6365599e..0000000000 --- a/owl-bot-staging/spanner/v1/google/cloud/spanner_v1/types/type.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.v1', - manifest={ - 'TypeCode', - 'TypeAnnotationCode', - 'Type', - 'StructType', - }, -) - - -class TypeCode(proto.Enum): - r"""``TypeCode`` is used as part of [Type][google.spanner.v1.Type] to - indicate the type of a Cloud Spanner value. - - Each legal value of a type can be encoded to or decoded from a JSON - value, using the encodings described below. All Cloud Spanner values - can be ``null``, regardless of type; ``null``\ s are always encoded - as a JSON ``null``. - - Values: - TYPE_CODE_UNSPECIFIED (0): - Not specified. - BOOL (1): - Encoded as JSON ``true`` or ``false``. - INT64 (2): - Encoded as ``string``, in decimal format. - FLOAT64 (3): - Encoded as ``number``, or the strings ``"NaN"``, - ``"Infinity"``, or ``"-Infinity"``. - FLOAT32 (15): - Encoded as ``number``, or the strings ``"NaN"``, - ``"Infinity"``, or ``"-Infinity"``. - TIMESTAMP (4): - Encoded as ``string`` in RFC 3339 timestamp format. The time - zone must be present, and must be ``"Z"``. - - If the schema has the column option - ``allow_commit_timestamp=true``, the placeholder string - ``"spanner.commit_timestamp()"`` can be used to instruct the - system to insert the commit timestamp associated with the - transaction commit. - DATE (5): - Encoded as ``string`` in RFC 3339 date format. - STRING (6): - Encoded as ``string``. - BYTES (7): - Encoded as a base64-encoded ``string``, as described in RFC - 4648, section 4. - ARRAY (8): - Encoded as ``list``, where the list elements are represented - according to - [array_element_type][google.spanner.v1.Type.array_element_type]. - STRUCT (9): - Encoded as ``list``, where list element ``i`` is represented - according to - [struct_type.fields[i]][google.spanner.v1.StructType.fields]. - NUMERIC (10): - Encoded as ``string``, in decimal format or scientific - notation format. Decimal format: ``[+-]Digits[.[Digits]]`` - or ``[+-][Digits].Digits`` - - Scientific notation: - ``[+-]Digits[.[Digits]][ExponentIndicator[+-]Digits]`` or - ``[+-][Digits].Digits[ExponentIndicator[+-]Digits]`` - (ExponentIndicator is ``"e"`` or ``"E"``) - JSON (11): - Encoded as a JSON-formatted ``string`` as described in RFC - 7159. The following rules are applied when parsing JSON - input: - - - Whitespace characters are not preserved. - - If a JSON object has duplicate keys, only the first key is - preserved. - - Members of a JSON object are not guaranteed to have their - order preserved. - - JSON array elements will have their order preserved. - PROTO (13): - Encoded as a base64-encoded ``string``, as described in RFC - 4648, section 4. - ENUM (14): - Encoded as ``string``, in decimal format. - INTERVAL (16): - Encoded as ``string``, in ``ISO8601`` duration format - - ``P[n]Y[n]M[n]DT[n]H[n]M[n[.fraction]]S`` where ``n`` is an - integer. For example, ``P1Y2M3DT4H5M6.5S`` represents time - duration of 1 year, 2 months, 3 days, 4 hours, 5 minutes, - and 6.5 seconds. - UUID (17): - Encoded as ``string``, in lower-case hexa-decimal format, as - described in RFC 9562, section 4. 
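The encodings above, applied to query parameters (a sketch; values live in a ``Struct``, so each scalar follows the JSON rule listed for its ``TypeCode``)::

    import base64

    from google.cloud import spanner_v1
    from google.protobuf import struct_pb2

    params = struct_pb2.Struct()
    params.update({
        "count": "42",                                   # INT64: decimal string
        "ratio": "NaN",                                  # FLOAT64: number or "NaN"
        "blob": base64.b64encode(b"\x00\x01").decode(),  # BYTES: RFC 4648 base64
        "seen_at": "2014-10-02T15:01:23.045123456Z",     # TIMESTAMP: RFC 3339, zone "Z"
    })
    param_types = {
        "count": spanner_v1.Type(code=spanner_v1.TypeCode.INT64),
        "ratio": spanner_v1.Type(code=spanner_v1.TypeCode.FLOAT64),
        "blob": spanner_v1.Type(code=spanner_v1.TypeCode.BYTES),
        "seen_at": spanner_v1.Type(code=spanner_v1.TypeCode.TIMESTAMP),
    }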
- """ - TYPE_CODE_UNSPECIFIED = 0 - BOOL = 1 - INT64 = 2 - FLOAT64 = 3 - FLOAT32 = 15 - TIMESTAMP = 4 - DATE = 5 - STRING = 6 - BYTES = 7 - ARRAY = 8 - STRUCT = 9 - NUMERIC = 10 - JSON = 11 - PROTO = 13 - ENUM = 14 - INTERVAL = 16 - UUID = 17 - - -class TypeAnnotationCode(proto.Enum): - r"""``TypeAnnotationCode`` is used as a part of - [Type][google.spanner.v1.Type] to disambiguate SQL types that should - be used for a given Cloud Spanner value. Disambiguation is needed - because the same Cloud Spanner type can be mapped to different SQL - types depending on SQL dialect. TypeAnnotationCode doesn't affect - the way value is serialized. - - Values: - TYPE_ANNOTATION_CODE_UNSPECIFIED (0): - Not specified. - PG_NUMERIC (2): - PostgreSQL compatible NUMERIC type. This annotation needs to - be applied to [Type][google.spanner.v1.Type] instances - having [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] type - code to specify that values of this type should be treated - as PostgreSQL NUMERIC values. Currently this annotation is - always needed for - [NUMERIC][google.spanner.v1.TypeCode.NUMERIC] when a client - interacts with PostgreSQL-enabled Spanner databases. - PG_JSONB (3): - PostgreSQL compatible JSONB type. This annotation needs to - be applied to [Type][google.spanner.v1.Type] instances - having [JSON][google.spanner.v1.TypeCode.JSON] type code to - specify that values of this type should be treated as - PostgreSQL JSONB values. Currently this annotation is always - needed for [JSON][google.spanner.v1.TypeCode.JSON] when a - client interacts with PostgreSQL-enabled Spanner databases. - PG_OID (4): - PostgreSQL compatible OID type. This - annotation can be used by a client interacting - with PostgreSQL-enabled Spanner database to - specify that a value should be treated using the - semantics of the OID type. - """ - TYPE_ANNOTATION_CODE_UNSPECIFIED = 0 - PG_NUMERIC = 2 - PG_JSONB = 3 - PG_OID = 4 - - -class Type(proto.Message): - r"""``Type`` indicates the type of a Cloud Spanner value, as might be - stored in a table cell or returned from an SQL query. - - Attributes: - code (google.cloud.spanner_v1.types.TypeCode): - Required. The [TypeCode][google.spanner.v1.TypeCode] for - this type. - array_element_type (google.cloud.spanner_v1.types.Type): - If [code][google.spanner.v1.Type.code] == - [ARRAY][google.spanner.v1.TypeCode.ARRAY], then - ``array_element_type`` is the type of the array elements. - struct_type (google.cloud.spanner_v1.types.StructType): - If [code][google.spanner.v1.Type.code] == - [STRUCT][google.spanner.v1.TypeCode.STRUCT], then - ``struct_type`` provides type information for the struct's - fields. - type_annotation (google.cloud.spanner_v1.types.TypeAnnotationCode): - The - [TypeAnnotationCode][google.spanner.v1.TypeAnnotationCode] - that disambiguates SQL type that Spanner will use to - represent values of this type during query processing. This - is necessary for some type codes because a single - [TypeCode][google.spanner.v1.TypeCode] can be mapped to - different SQL types depending on the SQL dialect. - [type_annotation][google.spanner.v1.Type.type_annotation] - typically is not needed to process the content of a value - (it doesn't affect serialization) and clients can ignore it - on the read path. 
- proto_type_fqn (str): - If [code][google.spanner.v1.Type.code] == - [PROTO][google.spanner.v1.TypeCode.PROTO] or - [code][google.spanner.v1.Type.code] == - [ENUM][google.spanner.v1.TypeCode.ENUM], then - ``proto_type_fqn`` is the fully qualified name of the proto - type representing the proto/enum definition. - """ - - code: 'TypeCode' = proto.Field( - proto.ENUM, - number=1, - enum='TypeCode', - ) - array_element_type: 'Type' = proto.Field( - proto.MESSAGE, - number=2, - message='Type', - ) - struct_type: 'StructType' = proto.Field( - proto.MESSAGE, - number=3, - message='StructType', - ) - type_annotation: 'TypeAnnotationCode' = proto.Field( - proto.ENUM, - number=4, - enum='TypeAnnotationCode', - ) - proto_type_fqn: str = proto.Field( - proto.STRING, - number=5, - ) - - -class StructType(proto.Message): - r"""``StructType`` defines the fields of a - [STRUCT][google.spanner.v1.TypeCode.STRUCT] type. - - Attributes: - fields (MutableSequence[google.cloud.spanner_v1.types.StructType.Field]): - The list of fields that make up this struct. Order is - significant, because values of this struct type are - represented as lists, where the order of field values - matches the order of fields in the - [StructType][google.spanner.v1.StructType]. In turn, the - order of fields matches the order of columns in a read - request, or the order of fields in the ``SELECT`` clause of - a query. - """ - - class Field(proto.Message): - r"""Message representing a single field of a struct. - - Attributes: - name (str): - The name of the field. For reads, this is the column name. - For SQL queries, it is the column alias (e.g., ``"Word"`` in - the query ``"SELECT 'hello' AS Word"``), or the column name - (e.g., ``"ColName"`` in the query - ``"SELECT ColName FROM Table"``). Some columns might have an - empty name (e.g., ``"SELECT UPPER(ColName)"``). Note that a - query result can contain multiple fields with the same name. - type_ (google.cloud.spanner_v1.types.Type): - The type of the field. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'Type' = proto.Field( - proto.MESSAGE, - number=2, - message='Type', - ) - - fields: MutableSequence[Field] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Field, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner/v1/mypy.ini b/owl-bot-staging/spanner/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/spanner/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/spanner/v1/noxfile.py b/owl-bot-staging/spanner/v1/noxfile.py deleted file mode 100644 index 594241417c..0000000000 --- a/owl-bot-staging/spanner/v1/noxfile.py +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
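Before the packaging files below, one note on ``StructType`` from the type module just removed: field order is significant because struct values are encoded as ordered lists. A sketch of a ``STRUCT<name STRING, rank INT64>`` type::

    from google.cloud import spanner_v1

    struct_type = spanner_v1.Type(
        code=spanner_v1.TypeCode.STRUCT,
        struct_type=spanner_v1.StructType(
            fields=[
                spanner_v1.StructType.Field(
                    name="name",
                    type_=spanner_v1.Type(code=spanner_v1.TypeCode.STRING),
                ),
                spanner_v1.StructType.Field(
                    name="rank",
                    type_=spanner_v1.Type(code=spanner_v1.TypeCode.INT64),
                ),
            ],
        ),
    )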
-# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", - "3.14", -] - -DEFAULT_PYTHON_VERSION = "3.14" - -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): -# Switch this to Python 3.15 alpha1 -# https://peps.python.org/pep-0790/ -PREVIEW_PYTHON_VERSION = "3.14" - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-spanner" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy - "mypy<1.16.0", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. 
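# Note on the session layout above: decorating with
# `@nox.session(python=ALL_PYTHON)` fans one definition out into one
# session per interpreter, each addressable from the CLI (for example
# `nox -s mypy-3.12`). A hypothetical extra session following the same
# pattern, shown for illustration only:

@nox.session(python=DEFAULT_PYTHON_VERSION)
def smoke(session):
    # Install the package in-place and import it as a fast sanity check.
    session.install("-e", ".")
    session.run("python", "-c", "import google.cloud.spanner_v1")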
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. 
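# (Aside: inside a test process you can confirm which protobuf backend
# PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION actually selected -- a sketch
# using protobuf's own helper:
#
#     from google.protobuf.internal import api_implementation
#     print(api_implementation.Type())  # "python", "upb", or "cpp"
# )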
- session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory - # paths to build: - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=PREVIEW_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # Note: If a dependency is added to the `prerel_deps` list, - # the `core_dependencies_from_source` list in the `core_deps_from_source` - # nox session should also be updated. - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpc-google-iam-v1", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--ignore-installed", dep) - # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` - # to the dictionary below once this bug is fixed. - # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add - # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below - # once this bug is fixed. - package_namespaces = { - "google-api-core": "google.api_core", - "google-auth": "google.auth", - "grpcio": "grpc", - "protobuf": "google.protobuf", - "proto-plus": "proto", - } - - version_namespace = package_namespaces.get(dep) - - print(f"Installed {dep}") - if version_namespace: - session.run( - "python", - "-c", - f"import {version_namespace}; print({version_namespace}.__version__)", - ) - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb"], -) -def core_deps_from_source(session, protobuf_implementation): - """Run all tests with core dependencies installed from source - rather than pulling the dependencies from PyPI. - """ - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and - # `grpcio-status` should be added to the list below so that they are installed from source, - # rather than PyPI. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be - # added to the list below so that it is installed from source, rather than PyPI - # Note: If a dependency is added to the `core_dependencies_from_source` list, - # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
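# The constraint-parsing pattern used above keeps a requirement only
# when it is pinned with `==`; a quick illustration against
# hypothetical constraints text:
#
#     import re
#     text = "google-api-core==1.34.1\nproto-plus==1.22.3\ngrpcio>=1.33.2\n"
#     deps = [
#         m.group(1)
#         for m in re.finditer(r"^\s*(\S+)(?===\S+)", text, flags=re.MULTILINE)
#     ]
#     assert deps == ["google-api-core", "proto-plus"]  # grpcio has no == pin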
- core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json deleted file mode 100644 index f7f33c3d29..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/snippet_metadata_google.spanner.v1.json +++ /dev/null @@ -1,2579 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.spanner.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-spanner", - "version": "0.0.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_create_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BatchCreateSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "session_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", - "shortName": "batch_create_sessions" - }, - "description": "Sample for BatchCreateSessions", - "file": "spanner_v1_generated_spanner_batch_create_sessions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_create_sessions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.batch_create_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchCreateSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": 
"Spanner" - }, - "shortName": "BatchCreateSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BatchCreateSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "session_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.BatchCreateSessionsResponse", - "shortName": "batch_create_sessions" - }, - "description": "Sample for BatchCreateSessions", - "file": "spanner_v1_generated_spanner_batch_create_sessions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchCreateSessions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_create_sessions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.batch_write", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchWrite", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BatchWrite" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BatchWriteRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "mutation_groups", - "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", - "shortName": "batch_write" - }, - "description": "Sample for BatchWrite", - "file": "spanner_v1_generated_spanner_batch_write_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchWrite_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_write_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.batch_write", - "method": { - "fullName": "google.spanner.v1.Spanner.BatchWrite", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BatchWrite" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_v1.types.BatchWriteRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "mutation_groups", - "type": "MutableSequence[google.cloud.spanner_v1.types.BatchWriteRequest.MutationGroup]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.BatchWriteResponse]", - "shortName": "batch_write" - }, - "description": "Sample for BatchWrite", - "file": "spanner_v1_generated_spanner_batch_write_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BatchWrite_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_batch_write_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.begin_transaction", - "method": { - "fullName": "google.spanner.v1.Spanner.BeginTransaction", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "options", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Transaction", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "spanner_v1_generated_spanner_begin_transaction_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_begin_transaction_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.begin_transaction", - "method": { - "fullName": "google.spanner.v1.Spanner.BeginTransaction", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "BeginTransaction" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.BeginTransactionRequest" - }, - { - "name": "session", - "type": "str" - }, 
- { - "name": "options", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Transaction", - "shortName": "begin_transaction" - }, - "description": "Sample for BeginTransaction", - "file": "spanner_v1_generated_spanner_begin_transaction_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_BeginTransaction_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_begin_transaction_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.commit", - "method": { - "fullName": "google.spanner.v1.Spanner.Commit", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CommitRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "mutations", - "type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" - }, - { - "name": "single_use_transaction", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "spanner_v1_generated_spanner_commit_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Commit_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_commit_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.commit", - "method": { - "fullName": "google.spanner.v1.Spanner.Commit", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Commit" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CommitRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "mutations", - 
"type": "MutableSequence[google.cloud.spanner_v1.types.Mutation]" - }, - { - "name": "single_use_transaction", - "type": "google.cloud.spanner_v1.types.TransactionOptions" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.CommitResponse", - "shortName": "commit" - }, - "description": "Sample for Commit", - "file": "spanner_v1_generated_spanner_commit_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Commit_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_commit_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.create_session", - "method": { - "fullName": "google.spanner.v1.Spanner.CreateSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "CreateSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CreateSessionRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Session", - "shortName": "create_session" - }, - "description": "Sample for CreateSession", - "file": "spanner_v1_generated_spanner_create_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_CreateSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_create_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.create_session", - "method": { - "fullName": "google.spanner.v1.Spanner.CreateSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "CreateSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.CreateSessionRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.spanner_v1.types.Session", - "shortName": "create_session" - }, - "description": "Sample for CreateSession", - "file": "spanner_v1_generated_spanner_create_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_CreateSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_create_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.delete_session", - "method": { - "fullName": "google.spanner.v1.Spanner.DeleteSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "DeleteSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_session" - }, - "description": "Sample for DeleteSession", - "file": "spanner_v1_generated_spanner_delete_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_DeleteSession_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_delete_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.delete_session", - "method": { - "fullName": "google.spanner.v1.Spanner.DeleteSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "DeleteSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.DeleteSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_session" - }, - "description": "Sample for DeleteSession", - "file": "spanner_v1_generated_spanner_delete_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_DeleteSession_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 
45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_delete_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_batch_dml", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteBatchDml" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", - "shortName": "execute_batch_dml" - }, - "description": "Sample for ExecuteBatchDml", - "file": "spanner_v1_generated_spanner_execute_batch_dml_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_batch_dml_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.execute_batch_dml", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteBatchDml", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteBatchDml" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteBatchDmlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ExecuteBatchDmlResponse", - "shortName": "execute_batch_dml" - }, - "description": "Sample for ExecuteBatchDml", - "file": "spanner_v1_generated_spanner_execute_batch_dml_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteBatchDml_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_batch_dml_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - 
"shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "execute_sql" - }, - "description": "Sample for ExecuteSql", - "file": "spanner_v1_generated_spanner_execute_sql_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_sql_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.execute_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "execute_sql" - }, - "description": "Sample for ExecuteSql", - "file": "spanner_v1_generated_spanner_execute_sql_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteSql_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_sql_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.execute_streaming_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteStreamingSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "execute_streaming_sql" - }, - "description": "Sample for ExecuteStreamingSql", - "file": "spanner_v1_generated_spanner_execute_streaming_sql_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_streaming_sql_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.execute_streaming_sql", - "method": { - "fullName": "google.spanner.v1.Spanner.ExecuteStreamingSql", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ExecuteStreamingSql" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ExecuteSqlRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "execute_streaming_sql" - }, - "description": "Sample for ExecuteStreamingSql", - "file": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ExecuteStreamingSql_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_execute_streaming_sql_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.get_session", - "method": { - "fullName": "google.spanner.v1.Spanner.GetSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "GetSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.GetSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Session", - "shortName": "get_session" - }, - "description": "Sample for GetSession", - "file": 
"spanner_v1_generated_spanner_get_session_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_GetSession_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_get_session_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.get_session", - "method": { - "fullName": "google.spanner.v1.Spanner.GetSession", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "GetSession" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.GetSessionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.Session", - "shortName": "get_session" - }, - "description": "Sample for GetSession", - "file": "spanner_v1_generated_spanner_get_session_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_GetSession_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_get_session_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.list_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.ListSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ListSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsAsyncPager", - "shortName": "list_sessions" - }, - "description": "Sample for ListSessions", - "file": "spanner_v1_generated_spanner_list_sessions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ListSessions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - 
"end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_list_sessions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.list_sessions", - "method": { - "fullName": "google.spanner.v1.Spanner.ListSessions", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "ListSessions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ListSessionsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.services.spanner.pagers.ListSessionsPager", - "shortName": "list_sessions" - }, - "description": "Sample for ListSessions", - "file": "spanner_v1_generated_spanner_list_sessions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_ListSessions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_list_sessions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_query", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionQuery", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_query" - }, - "description": "Sample for PartitionQuery", - "file": "spanner_v1_generated_spanner_partition_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.partition_query", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionQuery", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.PartitionQueryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_query" - }, - "description": "Sample for PartitionQuery", - "file": "spanner_v1_generated_spanner_partition_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionQuery_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.partition_read", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionRead" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.PartitionReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_read" - }, - "description": "Sample for PartitionRead", - "file": "spanner_v1_generated_spanner_partition_read_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionRead_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_read_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.partition_read", - "method": { - "fullName": "google.spanner.v1.Spanner.PartitionRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "PartitionRead" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_v1.types.PartitionReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.PartitionResponse", - "shortName": "partition_read" - }, - "description": "Sample for PartitionRead", - "file": "spanner_v1_generated_spanner_partition_read_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_PartitionRead_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_partition_read_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.read", - "method": { - "fullName": "google.spanner.v1.Spanner.Read", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Read" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "read" - }, - "description": "Sample for Read", - "file": "spanner_v1_generated_spanner_read_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Read_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_read_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.read", - "method": { - "fullName": "google.spanner.v1.Spanner.Read", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Read" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_v1.types.ResultSet", - "shortName": "read" - }, - "description": "Sample for Read", - "file": "spanner_v1_generated_spanner_read_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Read_sync", - "segments": [ - { - 
"end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_read_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.rollback", - "method": { - "fullName": "google.spanner.v1.Spanner.Rollback", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.RollbackRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "spanner_v1_generated_spanner_rollback_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Rollback_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_rollback_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.rollback", - "method": { - "fullName": "google.spanner.v1.Spanner.Rollback", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "Rollback" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.RollbackRequest" - }, - { - "name": "session", - "type": "str" - }, - { - "name": "transaction_id", - "type": "bytes" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "rollback" - }, - "description": "Sample for Rollback", - "file": "spanner_v1_generated_spanner_rollback_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_Rollback_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_rollback_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.spanner_v1.SpannerAsyncClient", - "shortName": "SpannerAsyncClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerAsyncClient.streaming_read", - "method": { - "fullName": "google.spanner.v1.Spanner.StreamingRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "StreamingRead" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "streaming_read" - }, - "description": "Sample for StreamingRead", - "file": "spanner_v1_generated_spanner_streaming_read_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_StreamingRead_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_streaming_read_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_v1.SpannerClient", - "shortName": "SpannerClient" - }, - "fullName": "google.cloud.spanner_v1.SpannerClient.streaming_read", - "method": { - "fullName": "google.spanner.v1.Spanner.StreamingRead", - "service": { - "fullName": "google.spanner.v1.Spanner", - "shortName": "Spanner" - }, - "shortName": "StreamingRead" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_v1.types.ReadRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "Iterable[google.cloud.spanner_v1.types.PartialResultSet]", - "shortName": "streaming_read" - }, - "description": "Sample for StreamingRead", - "file": "spanner_v1_generated_spanner_streaming_read_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_Spanner_StreamingRead_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_spanner_streaming_read_sync.py" - } - ] -} diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py deleted file mode 100644 index 49e64b4ab8..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed 
under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCreateSessions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_BatchCreateSessions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_batch_create_sessions(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.BatchCreateSessionsRequest( - database="database_value", - session_count=1420, - ) - - # Make the request - response = await client.batch_create_sessions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_BatchCreateSessions_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py deleted file mode 100644 index ade1da3661..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_create_sessions_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchCreateSessions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_BatchCreateSessions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_batch_create_sessions(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.BatchCreateSessionsRequest( - database="database_value", - session_count=1420, - ) - - # Make the request - response = client.batch_create_sessions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_Spanner_BatchCreateSessions_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py deleted file mode 100644 index d1565657e8..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchWrite -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_BatchWrite_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_batch_write():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    mutation_groups = spanner_v1.MutationGroup()
-    mutation_groups.mutations.insert.table = "table_value"
-
-    request = spanner_v1.BatchWriteRequest(
-        session="session_value",
-        mutation_groups=mutation_groups,
-    )
-
-    # Make the request
-    stream = await client.batch_write(request=request)
-
-    # Handle the response
-    async for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_BatchWrite_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
deleted file mode 100644
index 9b6621def9..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_batch_write_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchWrite
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BatchWrite_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_batch_write():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    mutation_groups = spanner_v1.MutationGroup()
-    mutation_groups.mutations.insert.table = "table_value"
-
-    request = spanner_v1.BatchWriteRequest(
-        session="session_value",
-        mutation_groups=mutation_groups,
-    )
-
-    # Make the request
-    stream = client.batch_write(request=request)
-
-    # Handle the response
-    for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_BatchWrite_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
deleted file mode 100644
index efdd161715..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BeginTransaction
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BeginTransaction_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_begin_transaction():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.BeginTransactionRequest(
-        session="session_value",
-    )
-
-    # Make the request
-    response = await client.begin_transaction(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_BeginTransaction_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
deleted file mode 100644
index 764dab8aa2..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_begin_transaction_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BeginTransaction
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_BeginTransaction_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_begin_transaction():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.BeginTransactionRequest(
-        session="session_value",
-    )
-
-    # Make the request
-    response = client.begin_transaction(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_BeginTransaction_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
deleted file mode 100644
index f61c297d38..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for Commit
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_Commit_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_commit():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CommitRequest(
-        transaction_id=b'transaction_id_blob',
-        session="session_value",
-    )
-
-    # Make the request
-    response = await client.commit(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_Commit_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
deleted file mode 100644
index a945bd2234..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_commit_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for Commit
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_Commit_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_commit():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CommitRequest(
-        transaction_id=b'transaction_id_blob',
-        session="session_value",
-    )
-
-    # Make the request
-    response = client.commit(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_Commit_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py
deleted file mode 100644
index 8cddc00c66..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_CreateSession_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_create_session():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CreateSessionRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = await client.create_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_CreateSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py
deleted file mode 100644
index b9de2d34e0..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_create_session_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_CreateSession_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_create_session():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.CreateSessionRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = client.create_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_CreateSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py
deleted file mode 100644
index 9fed1ddca6..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_DeleteSession_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_delete_session():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.DeleteSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_session(request=request)
-
-
-# [END spanner_v1_generated_Spanner_DeleteSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py
deleted file mode 100644
index 1f2a17e2d1..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_delete_session_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_DeleteSession_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_delete_session():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.DeleteSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_session(request=request)
-
-
-# [END spanner_v1_generated_Spanner_DeleteSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py
deleted file mode 100644
index 8313fd66a0..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteBatchDml
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteBatchDml_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_execute_batch_dml():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    statements = spanner_v1.Statement()
-    statements.sql = "sql_value"
-
-    request = spanner_v1.ExecuteBatchDmlRequest(
-        session="session_value",
-        statements=statements,
-        seqno=550,
-    )
-
-    # Make the request
-    response = await client.execute_batch_dml(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteBatchDml_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py
deleted file mode 100644
index dd4696b6b2..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_batch_dml_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteBatchDml
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteBatchDml_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_execute_batch_dml():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    statements = spanner_v1.Statement()
-    statements.sql = "sql_value"
-
-    request = spanner_v1.ExecuteBatchDmlRequest(
-        session="session_value",
-        statements=statements,
-        seqno=550,
-    )
-
-    # Make the request
-    response = client.execute_batch_dml(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteBatchDml_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py
deleted file mode 100644
index a12b20f3e9..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteSql_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_execute_sql():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    response = await client.execute_sql(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteSql_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py
deleted file mode 100644
index 761d0ca251..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_sql_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteSql_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_execute_sql():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    response = client.execute_sql(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteSql_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py
deleted file mode 100644
index 86b8eb910e..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteStreamingSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_execute_streaming_sql():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    stream = await client.execute_streaming_sql(request=request)
-
-    # Handle the response
-    async for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py
deleted file mode 100644
index dc7dba43b8..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_execute_streaming_sql_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExecuteStreamingSql
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ExecuteStreamingSql_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_execute_streaming_sql():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ExecuteSqlRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    stream = client.execute_streaming_sql(request=request)
-
-    # Handle the response
-    for response in stream:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ExecuteStreamingSql_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py
deleted file mode 100644
index d2e50f9891..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_GetSession_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_get_session():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.GetSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_GetSession_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py
deleted file mode 100644
index 36d6436b04..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_get_session_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetSession
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_GetSession_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_get_session():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.GetSessionRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_session(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_GetSession_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py
deleted file mode 100644
index 95aa4bf818..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ListSessions_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_list_sessions():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ListSessionsRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    page_result = client.list_sessions(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ListSessions_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py
deleted file mode 100644
index a9533fed0d..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_list_sessions_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListSessions
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_ListSessions_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_list_sessions():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ListSessionsRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    page_result = client.list_sessions(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END spanner_v1_generated_Spanner_ListSessions_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py
deleted file mode 100644
index 200fb2f6a2..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for PartitionQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_PartitionQuery_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_partition_query():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.PartitionQueryRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    response = await client.partition_query(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_PartitionQuery_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py
deleted file mode 100644
index d486a3590c..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_query_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for PartitionQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_PartitionQuery_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_partition_query():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.PartitionQueryRequest(
-        session="session_value",
-        sql="sql_value",
-    )
-
-    # Make the request
-    response = client.partition_query(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_PartitionQuery_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py
deleted file mode 100644
index 99055ade8b..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for PartitionRead
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_PartitionRead_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_partition_read():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.PartitionReadRequest(
-        session="session_value",
-        table="table_value",
-    )
-
-    # Make the request
-    response = await client.partition_read(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_PartitionRead_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py
deleted file mode 100644
index 0ca01ac423..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_partition_read_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for PartitionRead
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_PartitionRead_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_partition_read():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.PartitionReadRequest(
-        session="session_value",
-        table="table_value",
-    )
-
-    # Make the request
-    response = client.partition_read(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_PartitionRead_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py
deleted file mode 100644
index e555865245..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for Read
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_Read_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-async def sample_read():
-    # Create a client
-    client = spanner_v1.SpannerAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ReadRequest(
-        session="session_value",
-        table="table_value",
-        columns=['columns_value1', 'columns_value2'],
-    )
-
-    # Make the request
-    response = await client.read(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_Read_async]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py
deleted file mode 100644
index 8f9ee621f3..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_read_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for Read
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner
-
-
-# [START spanner_v1_generated_Spanner_Read_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_v1
-
-
-def sample_read():
-    # Create a client
-    client = spanner_v1.SpannerClient()
-
-    # Initialize request argument(s)
-    request = spanner_v1.ReadRequest(
-        session="session_value",
-        table="table_value",
-        columns=['columns_value1', 'columns_value2'],
-    )
-
-    # Make the request
-    response = client.read(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_Spanner_Read_sync]
diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py
deleted file mode 100644
index f99a1b8dd8..0000000000
--- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_async.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_Rollback_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_rollback(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - await client.rollback(request=request) - - -# [END spanner_v1_generated_Spanner_Rollback_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py deleted file mode 100644 index 00b23b21fc..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_rollback_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Rollback -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_Rollback_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_rollback(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.RollbackRequest( - session="session_value", - transaction_id=b'transaction_id_blob', - ) - - # Make the request - client.rollback(request=request) - - -# [END spanner_v1_generated_Spanner_Rollback_sync] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py deleted file mode 100644 index f79b9a96a1..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamingRead -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_StreamingRead_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -async def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerAsyncClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = await client.streaming_read(request=request) - - # Handle the response - async for response in stream: - print(response) - -# [END spanner_v1_generated_Spanner_StreamingRead_async] diff --git a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py b/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py deleted file mode 100644 index f81ed34b33..0000000000 --- a/owl-bot-staging/spanner/v1/samples/generated_samples/spanner_v1_generated_spanner_streaming_read_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamingRead -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner - - -# [START spanner_v1_generated_Spanner_StreamingRead_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_v1 - - -def sample_streaming_read(): - # Create a client - client = spanner_v1.SpannerClient() - - # Initialize request argument(s) - request = spanner_v1.ReadRequest( - session="session_value", - table="table_value", - columns=['columns_value1', 'columns_value2'], - ) - - # Make the request - stream = client.streaming_read(request=request) - - # Handle the response - for response in stream: - print(response) - -# [END spanner_v1_generated_Spanner_StreamingRead_sync] diff --git a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py b/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py deleted file mode 100644 index c7f41be11e..0000000000 --- a/owl-bot-staging/spanner/v1/scripts/fixup_spanner_v1_keywords.py +++ /dev/null @@ -1,191 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
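The streaming samples above consume the stream as a plain iterator and print each partial result as it arrives. For long-running reads it can be useful to checkpoint the `resume_token` carried by those partial results, so an interrupted stream can be restarted from the last checkpoint instead of from scratch. A minimal sketch of that pattern, not part of the generated samples (session/table values are illustrative, and the retry policy is left to the caller):

from google.cloud import spanner_v1


def sample_streaming_read_with_checkpoint():
    client = spanner_v1.SpannerClient()

    request = spanner_v1.ReadRequest(
        session="session_value",
        table="table_value",
        columns=['columns_value1', 'columns_value2'],
    )

    resume_token = b""
    for partial in client.streaming_read(request=request):
        if partial.resume_token:
            # Checkpoint: a new ReadRequest built with
            # resume_token=resume_token would continue the stream
            # from this point after a failure.
            resume_token = partial.resume_token
        print(partial)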
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spannerCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_create_sessions': ('database', 'session_count', 'session_template', ), - 'batch_write': ('session', 'mutation_groups', 'request_options', 'exclude_txn_from_change_streams', ), - 'begin_transaction': ('session', 'options', 'request_options', 'mutation_key', ), - 'commit': ('session', 'transaction_id', 'single_use_transaction', 'mutations', 'return_commit_stats', 'max_commit_delay', 'request_options', 'precommit_token', ), - 'create_session': ('database', 'session', ), - 'delete_session': ('name', ), - 'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', 'last_statements', ), - 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), - 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ), - 'get_session': ('name', ), - 'list_sessions': ('database', 'page_size', 'page_token', 'filter', ), - 'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ), - 'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ), - 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), - 'rollback': ('session', 'transaction_id', ), - 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=spannerCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the spanner client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner/v1/setup.py b/owl-bot-staging/spanner/v1/setup.py deleted file mode 100644 index 41d728098c..0000000000 --- a/owl-bot-staging/spanner/v1/setup.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-spanner' - - -description = "Google Cloud Spanner API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/spanner/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "grpcio >= 1.33.2, < 2.0.0", - "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - 
author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt deleted file mode 100644 index 1e93c60e50..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,12 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt deleted file mode 100644 index 1e93c60e50..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.14.txt +++ /dev/null @@ -1,12 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. 
-# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt deleted file mode 100644 index 5d29dea386..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,12 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt deleted file mode 100644 index 93e6826f2a..0000000000 --- a/owl-bot-staging/spanner/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf diff --git a/owl-bot-staging/spanner/v1/tests/__init__.py b/owl-bot-staging/spanner/v1/tests/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py b/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py deleted file mode 100644 index b90788ab0f..0000000000 --- a/owl-bot-staging/spanner/v1/tests/unit/gapic/spanner_v1/test_spanner.py +++ /dev/null @@ -1,11446 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
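The test module below runs entirely against mocked transports and anonymous credentials, so it needs no project, network, or service account. A standalone check in the same style, assuming only the public client surface exercised by these tests (the test name and scenario are illustrative):

from google.auth import credentials as ga_credentials
from google.cloud.spanner_v1.services.spanner import SpannerClient
from google.cloud.spanner_v1.services.spanner import transports


def test_client_reuses_provided_transport():
    # Constructing the transport up front pins both the credentials and
    # the channel; the client should then reuse that instance rather
    # than create a new one.
    transport = transports.SpannerGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials()
    )
    client = SpannerClient(transport=transport)
    assert client.transport is transport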
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.spanner_v1.services.spanner import SpannerAsyncClient -from google.cloud.spanner_v1.services.spanner import SpannerClient -from google.cloud.spanner_v1.services.spanner import pagers -from google.cloud.spanner_v1.services.spanner import transports -from google.cloud.spanner_v1.types import commit_response -from google.cloud.spanner_v1.types import keys -from google.cloud.spanner_v1.types import mutation -from google.cloud.spanner_v1.types import result_set -from google.cloud.spanner_v1.types import spanner -from google.cloud.spanner_v1.types import transaction -from google.cloud.spanner_v1.types import type as gs_type -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert SpannerClient._get_default_mtls_endpoint(None) is None - assert SpannerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert SpannerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert SpannerClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert SpannerClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert SpannerClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - SpannerClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert SpannerClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert SpannerClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert SpannerClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - SpannerClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert SpannerClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert SpannerClient._get_client_cert_source(None, False) is None - assert SpannerClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert SpannerClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert SpannerClient._get_client_cert_source(None, True) is mock_default_cert_source - assert 
SpannerClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = SpannerClient._DEFAULT_UNIVERSE - default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert SpannerClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == SpannerClient.DEFAULT_MTLS_ENDPOINT - assert SpannerClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert SpannerClient._get_api_endpoint(None, None, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT - assert SpannerClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == SpannerClient.DEFAULT_MTLS_ENDPOINT - assert SpannerClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert SpannerClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - SpannerClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert SpannerClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert SpannerClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert SpannerClient._get_universe_domain(None, None) == SpannerClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - SpannerClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
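The resolution order checked above also holds end to end: an explicit `universe_domain` client option takes precedence over the `GOOGLE_CLOUD_UNIVERSE_DOMAIN` environment variable, which in turn overrides the googleapis.com default. A minimal sketch of the caller-visible behavior, assuming a google-api-core version whose `ClientOptions` supports `universe_domain` (the domain value is illustrative):

from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud import spanner_v1

options = client_options.ClientOptions(universe_domain="example-universe.test")
client = spanner_v1.SpannerClient(
    client_options=options,
    credentials=ga_credentials.AnonymousCredentials(),
)

# The endpoint template is populated with the configured universe domain,
# and the domain itself is reported back on the client.
assert client.universe_domain == "example-universe.test"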
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = SpannerClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = SpannerClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (SpannerClient, "grpc"), - (SpannerAsyncClient, "grpc_asyncio"), - (SpannerClient, "rest"), -]) -def test_spanner_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.SpannerGrpcTransport, "grpc"), - (transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.SpannerRestTransport, "rest"), -]) -def test_spanner_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SpannerClient, "grpc"), - (SpannerAsyncClient, "grpc_asyncio"), - (SpannerClient, "rest"), -]) -def test_spanner_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -def test_spanner_client_get_transport_class(): - transport = SpannerClient.get_transport_class() - available_transports = [ - transports.SpannerGrpcTransport, - transports.SpannerRestTransport, - ] - assert transport in available_transports - - transport = SpannerClient.get_transport_class("grpc") - assert transport == transports.SpannerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), - (SpannerClient, transports.SpannerRestTransport, "rest"), -]) -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -def test_spanner_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(SpannerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc", "true"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (SpannerClient, transports.SpannerGrpcTransport, "grpc", "false"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (SpannerClient, transports.SpannerRestTransport, "rest", "true"), - (SpannerClient, transports.SpannerRestTransport, "rest", "false"), -]) -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def 
test_spanner_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - SpannerClient, SpannerAsyncClient -]) -@mock.patch.object(SpannerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SpannerAsyncClient)) -def test_spanner_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - SpannerClient, SpannerAsyncClient -]) -@mock.patch.object(SpannerClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerClient)) -@mock.patch.object(SpannerAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SpannerAsyncClient)) -def test_spanner_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = SpannerClient._DEFAULT_UNIVERSE - default_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = SpannerClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc"), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"), - (SpannerClient, transports.SpannerRestTransport, "rest"), -]) -def test_spanner_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (SpannerClient, transports.SpannerRestTransport, "rest", None), -]) -def test_spanner_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
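- # (The credentials_file option is not loaded here; it is forwarded verbatim to
- # the transport constructor, which is exactly what the assertion below checks.)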
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_spanner_client_client_options_from_dict(): - with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = SpannerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SpannerClient, transports.SpannerGrpcTransport, "grpc", grpc_helpers), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_spanner_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
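- # (Both google.auth.load_credentials_from_file and google.auth.default are
- # patched so no real ADC lookup happens; create_channel is patched to capture
- # which credentials the channel is actually built with.)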
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - scopes=None, - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.CreateSessionRequest, - dict, -]) -def test_create_session(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session( - name='name_value', - creator_role='creator_role_value', - multiplexed=True, - ) - response = client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.CreateSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -def test_create_session_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.CreateSessionRequest( - database='database_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_session(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == spanner.CreateSessionRequest(
- database='database_value',
- )
-
-def test_create_session_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.create_session in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.create_session] = mock_rpc
- request = {}
- client.create_session(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.create_session(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.create_session in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.create_session] = mock_rpc
-
- request = {}
- await client.create_session(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.create_session(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_session_async(transport: str = 'grpc_asyncio', request_type=spanner.CreateSessionRequest):
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_session),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session(
- name='name_value',
- creator_role='creator_role_value',
- multiplexed=True,
- ))
- response = await client.create_session(request)
-
- # Establish that the underlying gRPC stub method was called.
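- # (A bare len() truthiness check: the awaited FakeUnaryUnaryCall records at
- # least one call on the mocked stub.)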
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.CreateSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.Session) - assert response.name == 'name_value' - assert response.creator_role == 'creator_role_value' - assert response.multiplexed is True - - -@pytest.mark.asyncio -async def test_create_session_async_from_dict(): - await test_create_session_async(request_type=dict) - -def test_create_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CreateSessionRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value = spanner.Session() - client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_session_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CreateSessionRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session()) - await client.create_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_create_session_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.Session() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_session( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - - -def test_create_session_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
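- # (The flattened keyword arguments are used to build the request internally,
- # so combining them with an explicit request object is ambiguous and rejected.)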
- with pytest.raises(ValueError):
- client.create_session(
- spanner.CreateSessionRequest(),
- database='database_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_session_flattened_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_session),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_session(
- database='database_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].database
- mock_val = 'database_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_session_flattened_error_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_session(
- spanner.CreateSessionRequest(),
- database='database_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- spanner.BatchCreateSessionsRequest,
- dict,
-])
-def test_batch_create_sessions(request_type, transport: str = 'grpc'):
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.batch_create_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner.BatchCreateSessionsResponse(
- )
- response = client.batch_create_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = spanner.BatchCreateSessionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, spanner.BatchCreateSessionsResponse)
-
-
-def test_batch_create_sessions_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = spanner.BatchCreateSessionsRequest(
- database='database_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.batch_create_sessions),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.batch_create_sessions(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == spanner.BatchCreateSessionsRequest(
- database='database_value',
- )
-
-def test_batch_create_sessions_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.batch_create_sessions in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc
- request = {}
- client.batch_create_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.batch_create_sessions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_batch_create_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.batch_create_sessions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.batch_create_sessions] = mock_rpc
-
- request = {}
- await client.batch_create_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.batch_create_sessions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_batch_create_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchCreateSessionsRequest):
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.batch_create_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse(
- ))
- response = await client.batch_create_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.BatchCreateSessionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.BatchCreateSessionsResponse) - - -@pytest.mark.asyncio -async def test_batch_create_sessions_async_from_dict(): - await test_batch_create_sessions_async(request_type=dict) - -def test_batch_create_sessions_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BatchCreateSessionsRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value = spanner.BatchCreateSessionsResponse() - client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_create_sessions_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BatchCreateSessionsRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse()) - await client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_batch_create_sessions_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_create_sessions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.BatchCreateSessionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.batch_create_sessions( - database='database_value', - session_count=1420, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].session_count - mock_val = 1420 - assert arg == mock_val - - -def test_batch_create_sessions_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.batch_create_sessions(
- spanner.BatchCreateSessionsRequest(),
- database='database_value',
- session_count=1420,
- )
-
-@pytest.mark.asyncio
-async def test_batch_create_sessions_flattened_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.batch_create_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.batch_create_sessions(
- database='database_value',
- session_count=1420,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].database
- mock_val = 'database_value'
- assert arg == mock_val
- arg = args[0].session_count
- mock_val = 1420
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_batch_create_sessions_flattened_error_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.batch_create_sessions(
- spanner.BatchCreateSessionsRequest(),
- database='database_value',
- session_count=1420,
- )
-
-
-@pytest.mark.parametrize("request_type", [
- spanner.GetSessionRequest,
- dict,
-])
-def test_get_session(request_type, transport: str = 'grpc'):
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_session),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner.Session(
- name='name_value',
- creator_role='creator_role_value',
- multiplexed=True,
- )
- response = client.get_session(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = spanner.GetSessionRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, spanner.Session)
- assert response.name == 'name_value'
- assert response.creator_role == 'creator_role_value'
- assert response.multiplexed is True
-
-
-def test_get_session_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
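- # (Per AIP-4235, request fields annotated for auto-population are filled with
- # a UUID4 by the client only when left unset -- roughly:
- # if not request.request_id: request.request_id = str(uuid.uuid4())
- # The field name above is illustrative; these session RPCs may declare no such
- # field, which is why this is only a coverage failsafe.)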
- request = spanner.GetSessionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_session(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.GetSessionRequest( - name='name_value', - ) - -def test_get_session_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_session] = mock_rpc - request = {} - client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_session in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_session] = mock_rpc - - request = {} - await client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_session_async(transport: str = 'grpc_asyncio', request_type=spanner.GetSessionRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_session), - '__call__') as call: - # Designate an appropriate return value for the call. 
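- # (FakeUnaryUnaryCall wraps the response in an awaitable, mimicking the call
- # object a real grpc.aio unary-unary stub would return.)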
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session(
- name='name_value',
- creator_role='creator_role_value',
- multiplexed=True,
- ))
- response = await client.get_session(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner.GetSessionRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, spanner.Session)
- assert response.name == 'name_value'
- assert response.creator_role == 'creator_role_value'
- assert response.multiplexed is True
-
-
-@pytest.mark.asyncio
-async def test_get_session_async_from_dict():
- await test_get_session_async(request_type=dict)
-
-def test_get_session_field_headers():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.GetSessionRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_session),
- '__call__') as call:
- call.return_value = spanner.Session()
- client.get_session(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_session_field_headers_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.GetSessionRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_session),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
- await client.get_session(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_session_flattened():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_session),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner.Session()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_session(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_session_flattened_error():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_session(
- spanner.GetSessionRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_session_flattened_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_session),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_session(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_session_flattened_error_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_session(
- spanner.GetSessionRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- spanner.ListSessionsRequest,
- dict,
-])
-def test_list_sessions(request_type, transport: str = 'grpc'):
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner.ListSessionsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = spanner.ListSessionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListSessionsPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_sessions_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = spanner.ListSessionsRequest( - database='database_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_sessions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ListSessionsRequest( - database='database_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_sessions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc - request = {} - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_sessions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_sessions] = mock_rpc - - request = {} - await client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_sessions_async(transport: str = 'grpc_asyncio', request_type=spanner.ListSessionsRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner.ListSessionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListSessionsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_async_from_dict():
- await test_list_sessions_async(request_type=dict)
-
-def test_list_sessions_field_headers():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.ListSessionsRequest()
-
- request.database = 'database_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- call.return_value = spanner.ListSessionsResponse()
- client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'database=database_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_sessions_field_headers_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.ListSessionsRequest()
-
- request.database = 'database_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
- await client.list_sessions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'database=database_value',
- ) in kw['metadata']
-
-
-def test_list_sessions_flattened():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner.ListSessionsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_sessions(
- database='database_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].database
- mock_val = 'database_value'
- assert arg == mock_val
-
-
-def test_list_sessions_flattened_error():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_sessions(
- spanner.ListSessionsRequest(),
- database='database_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_sessions(
- database='database_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].database
- mock_val = 'database_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_sessions_flattened_error_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_sessions(
- spanner.ListSessionsRequest(),
- database='database_value',
- )
-
-
-def test_list_sessions_pager(transport_name: str = "grpc"):
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- spanner.ListSessionsResponse(
- sessions=[
- spanner.Session(),
- spanner.Session(),
- spanner.Session(),
- ],
- next_page_token='abc',
- ),
- spanner.ListSessionsResponse(
- sessions=[],
- next_page_token='def',
- ),
- spanner.ListSessionsResponse(
- sessions=[
- spanner.Session(),
- ],
- next_page_token='ghi',
- ),
- spanner.ListSessionsResponse(
- sessions=[
- spanner.Session(),
- spanner.Session(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('database', ''),
- )),
- )
- pager = client.list_sessions(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, spanner.Session)
- for i in results)
-
-def test_list_sessions_pages(transport_name: str = "grpc"):
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_sessions),
- '__call__') as call:
- # Set the response to a series of pages.
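- # (mock's side_effect yields one element per successive call; the trailing
- # RuntimeError guards against the pager requesting more pages than scripted.)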
- call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - pages = list(client.list_sessions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_sessions_async_pager(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_sessions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner.Session) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_sessions_async_pages(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sessions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - spanner.Session(), - ], - next_page_token='abc', - ), - spanner.ListSessionsResponse( - sessions=[], - next_page_token='def', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - ], - next_page_token='ghi', - ), - spanner.ListSessionsResponse( - sessions=[ - spanner.Session(), - spanner.Session(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_sessions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner.DeleteSessionRequest, - dict, -]) -def test_delete_session(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.DeleteSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_session_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.DeleteSessionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_session(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.DeleteSessionRequest( - name='name_value', - ) - -def test_delete_session_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc - request = {} - client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_session_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_session in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_session] = mock_rpc - - request = {} - await client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_session_async(transport: str = 'grpc_asyncio', request_type=spanner.DeleteSessionRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.DeleteSessionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_session_async_from_dict(): - await test_delete_session_async(request_type=dict) - -def test_delete_session_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.DeleteSessionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_session), - '__call__') as call: - call.return_value = None - client.delete_session(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
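- # (The x-goog-request-params metadata entry carries routing fields taken from
- # the request, so the backend can route the RPC without parsing its body.)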
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_session_field_headers_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.DeleteSessionRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_session),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_session(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_session_flattened():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_session),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_session(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_session_flattened_error():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_session(
- spanner.DeleteSessionRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_session_flattened_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_session),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_session(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_session_flattened_error_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.delete_session( - spanner.DeleteSessionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteSqlRequest, - dict, -]) -def test_execute_sql(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = result_set.ResultSet( - ) - response = client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteSqlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -def test_execute_sql_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.execute_sql(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - -def test_execute_sql_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc - request = {} - client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.execute_sql(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_execute_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.execute_sql in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.execute_sql] = mock_rpc
-
- request = {}
- await client.execute_sql(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.execute_sql(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_execute_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest):
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.execute_sql),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
- ))
- response = await client.execute_sql(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner.ExecuteSqlRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, result_set.ResultSet)
-
-
-@pytest.mark.asyncio
-async def test_execute_sql_async_from_dict():
- await test_execute_sql_async(request_type=dict)
-
-def test_execute_sql_field_headers():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.ExecuteSqlRequest()
-
- request.session = 'session_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.execute_sql),
- '__call__') as call:
- call.return_value = result_set.ResultSet()
- client.execute_sql(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_execute_sql_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_sql), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) - await client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteSqlRequest, - dict, -]) -def test_execute_streaming_sql(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([result_set.PartialResultSet()]) - response = client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteSqlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, result_set.PartialResultSet) - - -def test_execute_streaming_sql_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.execute_streaming_sql(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteSqlRequest( - session='session_value', - sql='sql_value', - ) - -def test_execute_streaming_sql_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_streaming_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc - request = {} - client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.execute_streaming_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.execute_streaming_sql in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.execute_streaming_sql] = mock_rpc - - request = {} - await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.execute_streaming_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteSqlRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - # Designate an appropriate return value for the call. 
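- # Server-streaming responses are faked with an aio.UnaryStreamCall mock
- # whose read() coroutine returns a single PartialResultSet from the
- # side_effect list.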
- call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - response = await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteSqlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, result_set.PartialResultSet) - - -@pytest.mark.asyncio -async def test_execute_streaming_sql_async_from_dict(): - await test_execute_streaming_sql_async(request_type=dict) - -def test_execute_streaming_sql_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_execute_streaming_sql_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ExecuteSqlRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_streaming_sql), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ExecuteBatchDmlRequest, - dict, -]) -def test_execute_batch_dml(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.ExecuteBatchDmlResponse( - ) - response = client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ExecuteBatchDmlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.ExecuteBatchDmlResponse) - - -def test_execute_batch_dml_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ExecuteBatchDmlRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.execute_batch_dml), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.execute_batch_dml(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ExecuteBatchDmlRequest( - session='session_value', - ) - -def test_execute_batch_dml_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_batch_dml in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc - request = {} - client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.execute_batch_dml(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_execute_batch_dml_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.execute_batch_dml in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.execute_batch_dml] = mock_rpc - - request = {} - await client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.execute_batch_dml(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_execute_batch_dml_async(transport: str = 'grpc_asyncio', request_type=spanner.ExecuteBatchDmlRequest):
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.execute_batch_dml),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse(
- ))
- response = await client.execute_batch_dml(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner.ExecuteBatchDmlRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, spanner.ExecuteBatchDmlResponse)
-
-
-@pytest.mark.asyncio
-async def test_execute_batch_dml_async_from_dict():
- await test_execute_batch_dml_async(request_type=dict)
-
-def test_execute_batch_dml_field_headers():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.ExecuteBatchDmlRequest()
-
- request.session = 'session_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.execute_batch_dml),
- '__call__') as call:
- call.return_value = spanner.ExecuteBatchDmlResponse()
- client.execute_batch_dml(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'session=session_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_execute_batch_dml_field_headers_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.ExecuteBatchDmlRequest()
-
- request.session = 'session_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.execute_batch_dml),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse())
- await client.execute_batch_dml(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ReadRequest, - dict, -]) -def test_read(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = result_set.ResultSet( - ) - response = client.read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, result_set.ResultSet) - - -def test_read_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.read(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - -def test_read_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.read] = mock_rpc - request = {} - client.read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.read(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.read in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.read] = mock_rpc
-
- request = {}
- await client.read(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.read(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest):
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.read),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
- ))
- response = await client.read(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner.ReadRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, result_set.ResultSet)
-
-
-@pytest.mark.asyncio
-async def test_read_async_from_dict():
- await test_read_async(request_type=dict)
-
-def test_read_field_headers():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.ReadRequest()
-
- request.session = 'session_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.read),
- '__call__') as call:
- call.return_value = result_set.ResultSet()
- client.read(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.read), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet()) - await client.read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.ReadRequest, - dict, -]) -def test_streaming_read(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([result_set.PartialResultSet()]) - response = client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.ReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, result_set.PartialResultSet) - - -def test_streaming_read_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.streaming_read(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.ReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - -def test_streaming_read_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.streaming_read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc - request = {} - client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.streaming_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_streaming_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.streaming_read in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.streaming_read] = mock_rpc - - request = {} - await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.streaming_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_streaming_read_async(transport: str = 'grpc_asyncio', request_type=spanner.ReadRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - response = await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. 
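- # For the async transport the tests only assert that at least one call
- # was recorded (a truthy length) rather than an exact count.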
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.ReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, result_set.PartialResultSet) - - -@pytest.mark.asyncio -async def test_streaming_read_async_from_dict(): - await test_streaming_read_async(request_type=dict) - -def test_streaming_read_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value = iter([result_set.PartialResultSet()]) - client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_streaming_read_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.ReadRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_read), - '__call__') as call: - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()]) - await client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.BeginTransactionRequest, - dict, -]) -def test_begin_transaction(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transaction.Transaction( - id=b'id_blob', - ) - response = client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.BeginTransactionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, transaction.Transaction) - assert response.id == b'id_blob' - - -def test_begin_transaction_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.BeginTransactionRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.begin_transaction), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.begin_transaction(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BeginTransactionRequest( - session='session_value', - ) - -def test_begin_transaction_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_begin_transaction_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.begin_transaction in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.begin_transaction] = mock_rpc - - request = {} - await client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.begin_transaction(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_begin_transaction_async(transport: str = 'grpc_asyncio', request_type=spanner.BeginTransactionRequest):
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction(
- id=b'id_blob',
- ))
- response = await client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner.BeginTransactionRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, transaction.Transaction)
- assert response.id == b'id_blob'
-
-
-@pytest.mark.asyncio
-async def test_begin_transaction_async_from_dict():
- await test_begin_transaction_async(request_type=dict)
-
-def test_begin_transaction_field_headers():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.BeginTransactionRequest()
-
- request.session = 'session_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- call.return_value = transaction.Transaction()
- client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'session=session_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_begin_transaction_field_headers_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.BeginTransactionRequest()
-
- request.session = 'session_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction())
- await client.begin_transaction(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'session=session_value',
- ) in kw['metadata']
-
-
-def test_begin_transaction_flattened():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = transaction.Transaction()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.begin_transaction(
- session='session_value',
- options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].session
- mock_val = 'session_value'
- assert arg == mock_val
- arg = args[0].options
- mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
- assert arg == mock_val
-
-
-def test_begin_transaction_flattened_error():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.begin_transaction(
- spanner.BeginTransactionRequest(),
- session='session_value',
- options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
- )
-
-@pytest.mark.asyncio
-async def test_begin_transaction_flattened_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.begin_transaction),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.begin_transaction(
- session='session_value',
- options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].session
- mock_val = 'session_value'
- assert arg == mock_val
- arg = args[0].options
- mock_val = transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_begin_transaction_flattened_error_async():
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.begin_transaction( - spanner.BeginTransactionRequest(), - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner.CommitRequest, - dict, -]) -def test_commit(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse( - ) - response = client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.CommitRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, commit_response.CommitResponse) - - -def test_commit_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.CommitRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.commit(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.CommitRequest( - session='session_value', - ) - -def test_commit_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.commit in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.commit] = mock_rpc - request = {} - client.commit(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.commit(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_commit_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.commit in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.commit] = mock_rpc
-
- request = {}
- await client.commit(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.commit(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_commit_async(transport: str = 'grpc_asyncio', request_type=spanner.CommitRequest):
- client = SpannerAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse(
- ))
- response = await client.commit(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner.CommitRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, commit_response.CommitResponse)
-
-
-@pytest.mark.asyncio
-async def test_commit_async_from_dict():
- await test_commit_async(request_type=dict)
-
-def test_commit_field_headers():
- client = SpannerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner.CommitRequest()
-
- request.session = 'session_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.commit),
- '__call__') as call:
- call.return_value = commit_response.CommitResponse()
- client.commit(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_commit_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.CommitRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse()) - await client.commit(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -def test_commit_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.commit( - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].mutations - mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))] - assert arg == mock_val - assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)) - - -def test_commit_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - spanner.CommitRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - -@pytest.mark.asyncio -async def test_commit_flattened_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.commit), - '__call__') as call: - # Designate an appropriate return value for the call. 
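-        # FakeUnaryUnaryCall (from google.api_core.grpc_helpers_async) wraps the
-        # response so it can be awaited like a real grpc.aio call; returning the
-        # bare message here would make `await client.commit(...)` fail inside
-        # the async client.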
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.commit(
-            session='session_value',
-            transaction_id=b'transaction_id_blob',
-            mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))],
-            single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].session
-        mock_val = 'session_value'
-        assert arg == mock_val
-        arg = args[0].mutations
-        mock_val = [mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))]
-        assert arg == mock_val
-        assert args[0].single_use_transaction == transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC))
-
-@pytest.mark.asyncio
-async def test_commit_flattened_error_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.commit(
-            spanner.CommitRequest(),
-            session='session_value',
-            transaction_id=b'transaction_id_blob',
-            mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))],
-            single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.RollbackRequest,
-    dict,
-])
-def test_rollback(request_type, transport: str = 'grpc'):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rollback),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.rollback(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner.RollbackRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_rollback_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner.RollbackRequest(
-        session='session_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.rollback(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.RollbackRequest( - session='session_value', - ) - -def test_rollback_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rollback in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rollback] = mock_rpc - request = {} - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rollback_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.rollback in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.rollback] = mock_rpc - - request = {} - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rollback_async(transport: str = 'grpc_asyncio', request_type=spanner.RollbackRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.RollbackRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_rollback_async_from_dict(): - await test_rollback_async(request_type=dict) - -def test_rollback_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.RollbackRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = None - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rollback_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.RollbackRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.rollback(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -def test_rollback_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rollback( - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].transaction_id - mock_val = b'transaction_id_blob' - assert arg == mock_val - - -def test_rollback_flattened_error(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.rollback(
-            spanner.RollbackRequest(),
-            session='session_value',
-            transaction_id=b'transaction_id_blob',
-        )
-
-@pytest.mark.asyncio
-async def test_rollback_flattened_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rollback),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.rollback(
-            session='session_value',
-            transaction_id=b'transaction_id_blob',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].session
-        mock_val = 'session_value'
-        assert arg == mock_val
-        arg = args[0].transaction_id
-        mock_val = b'transaction_id_blob'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_rollback_flattened_error_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.rollback(
-            spanner.RollbackRequest(),
-            session='session_value',
-            transaction_id=b'transaction_id_blob',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.PartitionQueryRequest,
-    dict,
-])
-def test_partition_query(request_type, transport: str = 'grpc'):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner.PartitionResponse()
-        response = client.partition_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner.PartitionQueryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner.PartitionResponse)
-
-
-def test_partition_query_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner.PartitionQueryRequest(
-        session='session_value',
-        sql='sql_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_query),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
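-        # Auto-population itself happens in the generated client: a request
-        # field annotated as UUID4 in the proto and listed in the service's
-        # auto_populated_fields is filled with str(uuid.uuid4()) when unset,
-        # roughly (field name hypothetical for this service):
-        #
-        #   if not request.request_id:
-        #       request.request_id = str(uuid.uuid4())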
-        client.partition_query(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner.PartitionQueryRequest(
-            session='session_value',
-            sql='sql_value',
-        )
-
-def test_partition_query_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = SpannerClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.partition_query in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc
-        request = {}
-        client.partition_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.partition_query(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_partition_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = SpannerAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.partition_query in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.partition_query] = mock_rpc
-
-        request = {}
-        await client.partition_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.partition_query(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_partition_query_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionQueryRequest):
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse())
-        response = await client.partition_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.PartitionQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner.PartitionResponse) - - -@pytest.mark.asyncio -async def test_partition_query_async_from_dict(): - await test_partition_query_async(request_type=dict) - -def test_partition_query_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionQueryRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = spanner.PartitionResponse() - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_partition_query_field_headers_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.PartitionQueryRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse()) - await client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.PartitionReadRequest, - dict, -]) -def test_partition_read(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner.PartitionResponse( - ) - response = client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.PartitionReadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner.PartitionResponse) - - -def test_partition_read_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.PartitionReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.partition_read(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.PartitionReadRequest( - session='session_value', - table='table_value', - index='index_value', - ) - -def test_partition_read_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc - request = {} - client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.partition_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_partition_read_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.partition_read in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.partition_read] = mock_rpc - - request = {} - await client.partition_read(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.partition_read(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_partition_read_async(transport: str = 'grpc_asyncio', request_type=spanner.PartitionReadRequest):
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_read),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse())
-        response = await client.partition_read(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner.PartitionReadRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner.PartitionResponse)
-
-
-@pytest.mark.asyncio
-async def test_partition_read_async_from_dict():
-    await test_partition_read_async(request_type=dict)
-
-def test_partition_read_field_headers():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.PartitionReadRequest()
-
-    request.session = 'session_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_read),
-            '__call__') as call:
-        call.return_value = spanner.PartitionResponse()
-        client.partition_read(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'session=session_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_partition_read_field_headers_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.PartitionReadRequest()
-
-    request.session = 'session_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_read),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse())
-        await client.partition_read(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'session=session_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - spanner.BatchWriteRequest, - dict, -]) -def test_batch_write(request_type, transport: str = 'grpc'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([spanner.BatchWriteResponse()]) - response = client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - for message in response: - assert isinstance(message, spanner.BatchWriteResponse) - - -def test_batch_write_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner.BatchWriteRequest( - session='session_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_write(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner.BatchWriteRequest( - session='session_value', - ) - -def test_batch_write_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_write in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc - request = {} - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_write_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.batch_write in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_write] = mock_rpc - - request = {} - await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.batch_write(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_write_async(transport: str = 'grpc_asyncio', request_type=spanner.BatchWriteRequest): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()]) - response = await client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner.BatchWriteRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, spanner.BatchWriteResponse) - - -@pytest.mark.asyncio -async def test_batch_write_async_from_dict(): - await test_batch_write_async(request_type=dict) - -def test_batch_write_field_headers(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner.BatchWriteRequest() - - request.session = 'session_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - call.return_value = iter([spanner.BatchWriteResponse()]) - client.batch_write(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'session=session_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_batch_write_field_headers_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner.BatchWriteRequest()
-
-    request.session = 'session_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_write),
-            '__call__') as call:
-        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
-        call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()])
-        await client.batch_write(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'session=session_value',
-    ) in kw['metadata']
-
-
-def test_batch_write_flattened():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_write),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = iter([spanner.BatchWriteResponse()])
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.batch_write(
-            session='session_value',
-            mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].session
-        mock_val = 'session_value'
-        assert arg == mock_val
-        arg = args[0].mutation_groups
-        mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])]
-        assert arg == mock_val
-
-
-def test_batch_write_flattened_error():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.batch_write(
-            spanner.BatchWriteRequest(),
-            session='session_value',
-            mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
-        )
-
-@pytest.mark.asyncio
-async def test_batch_write_flattened_async():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_write),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
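-        # Flattened arguments are merged into a fresh request object by the
-        # client, which is why passing an explicit request together with keyword
-        # fields raises ValueError (see the *_flattened_error tests above);
-        # conceptually the client begins with something like:
-        #
-        #   has_flattened_params = any([session, mutation_groups])
-        #   if request is not None and has_flattened_params:
-        #       raise ValueError("If the `request` argument is set, then none of "
-        #                        "the individual field arguments should be set.")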
- response = await client.batch_write( - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].session - mock_val = 'session_value' - assert arg == mock_val - arg = args[0].mutation_groups - mock_val = [spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_batch_write_flattened_error_async(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_write( - spanner.BatchWriteRequest(), - session='session_value', - mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])], - ) - - -def test_create_session_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_session] = mock_rpc - - request = {} - client.create_session(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_session_rest_required_fields(request_type=spanner.CreateSessionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.Session() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_session(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_session_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_session._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "session", ))) - - -def test_create_session_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = spanner.Session() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_session(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1]) - - -def test_create_session_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_session( - spanner.CreateSessionRequest(), - database='database_value', - ) - - -def test_batch_create_sessions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_create_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_create_sessions] = mock_rpc - - request = {} - client.batch_create_sessions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_create_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_create_sessions_rest_required_fields(request_type=spanner.BatchCreateSessionsRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["database"] = "" - request_init["session_count"] = 0 - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - jsonified_request["sessionCount"] = 1420 - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_create_sessions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - assert "sessionCount" in jsonified_request - assert jsonified_request["sessionCount"] == 1420 - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.BatchCreateSessionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
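-            # The real path_template.transcode() matches the request against the
-            # method's http rule and returns a dict of the same shape as the
-            # fake built below, roughly:
-            #
-            #   {'uri': <expanded path>, 'method': <http verb>,
-            #    'query_params': <fields not bound in path or body>,
-            #    'body': <body message, when the rule declares one>}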
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.batch_create_sessions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_create_sessions_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.batch_create_sessions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "sessionCount", ))) - - -def test_batch_create_sessions_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.BatchCreateSessionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - session_count=1420, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.BatchCreateSessionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.batch_create_sessions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions:batchCreate" % client.transport._host, args[1]) - - -def test_batch_create_sessions_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.batch_create_sessions( - spanner.BatchCreateSessionsRequest(), - database='database_value', - session_count=1420, - ) - - -def test_get_session_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_session in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_session] = mock_rpc - - request = {} - client.get_session(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_session_rest_required_fields(request_type=spanner.GetSessionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.Session() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_session(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_session_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_session._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_session_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner.Session() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner.Session.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_session(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1]) - - -def test_get_session_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_session( - spanner.GetSessionRequest(), - name='name_value', - ) - - -def test_list_sessions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_sessions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_sessions] = mock_rpc - - request = {} - client.list_sessions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_sessions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_sessions_rest_required_fields(request_type=spanner.ListSessionsRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sessions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.ListSessionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
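-            # ListSessions maps to an HTTP GET, so the stubbed transcode
-            # result below carries no 'body' entry and every request field
-            # is expected to travel in the query string (hence the
-            # inspection of req.call_args.kwargs['params'] at the end).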
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = spanner.ListSessionsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_sessions(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_sessions_rest_unset_required_fields():
-    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_sessions._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("database", )))
-
-
-def test_list_sessions_rest_flattened():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.ListSessionsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            database='database_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = spanner.ListSessionsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_sessions(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/sessions" % client.transport._host, args[1])
-
-
-def test_list_sessions_rest_flattened_error(transport: str = 'rest'):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_sessions(
-            spanner.ListSessionsRequest(),
-            database='database_value',
-        )
-
-
-def test_list_sessions_rest_pager(transport: str = 'rest'):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            spanner.ListSessionsResponse(
-                sessions=[
-                    spanner.Session(),
-                    spanner.Session(),
-                    spanner.Session(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner.ListSessionsResponse(
-                sessions=[],
-                next_page_token='def',
-            ),
-            spanner.ListSessionsResponse(
-                sessions=[
-                    spanner.Session(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner.ListSessionsResponse(
-                sessions=[
-                    spanner.Session(),
-                    spanner.Session(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(spanner.ListSessionsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-
-        pager = client.list_sessions(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner.Session)
-            for i in results)
-
-        pages = list(client.list_sessions(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_delete_session_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = SpannerClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_session in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_session] = mock_rpc
-
-        request = {}
-        client.delete_session(request)
-
-        # Establish that the underlying gRPC stub method was called.
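-        # A GAPIC transport builds its wrapped methods once, in
-        # _prep_wrapped_messages, and keeps them keyed by the bound
-        # transport method; every call then does, roughly:
-        #
-        #     rpc = client._transport._wrapped_methods[client._transport.delete_session]
-        #     return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
-        #
-        # so swapping mock_rpc into that dict above intercepts both calls
-        # without wrap_method ever running again, as the counts below show.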
- assert mock_rpc.call_count == 1 - - client.delete_session(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_session_rest_required_fields(request_type=spanner.DeleteSessionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_session._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_session(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_session_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_session._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_session_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
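-        # DeleteSession returns google.protobuf.Empty, which the REST layer
-        # surfaces as an empty JSON body, so the client method yields None;
-        # the fake response below models exactly that.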
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_session(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/sessions/*}" % client.transport._host, args[1]) - - -def test_delete_session_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_session( - spanner.DeleteSessionRequest(), - name='name_value', - ) - - -def test_execute_sql_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_sql] = mock_rpc - - request = {} - client.execute_sql(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.execute_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_execute_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["sql"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["sql"] = 'sql_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "sql" in jsonified_request - assert jsonified_request["sql"] == 'sql_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.ResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
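-            # Unlike the GET-based session lookups above, ExecuteSql is a
-            # POST that carries the whole ExecuteSqlRequest as the HTTP
-            # body, which is why the stub below also attaches 'body' to the
-            # transcode result.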
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.execute_sql(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_execute_sql_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.execute_sql._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "sql", ))) - - -def test_execute_streaming_sql_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_streaming_sql in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_streaming_sql] = mock_rpc - - request = {} - client.execute_streaming_sql(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.execute_streaming_sql(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_execute_streaming_sql_rest_required_fields(request_type=spanner.ExecuteSqlRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["sql"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["sql"] = 'sql_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_streaming_sql._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "sql" in jsonified_request - assert jsonified_request["sql"] == 'sql_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.PartialResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
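-            # ExecuteStreamingSql is a server-streaming RPC; over REST the
-            # stream arrives as a JSON array of PartialResultSet messages.
-            # The fake below therefore wraps the payload in "[...]" and
-            # feeds it through iter_content, mirroring how the streaming
-            # response is parsed chunk by chunk.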
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.execute_streaming_sql(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_execute_streaming_sql_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.execute_streaming_sql._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "sql", ))) - - -def test_execute_batch_dml_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.execute_batch_dml in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.execute_batch_dml] = mock_rpc - - request = {} - client.execute_batch_dml(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.execute_batch_dml(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_execute_batch_dml_rest_required_fields(request_type=spanner.ExecuteBatchDmlRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["seqno"] = 0 - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["seqno"] = 550 - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).execute_batch_dml._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "seqno" in jsonified_request - assert jsonified_request["seqno"] == 550 - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner.ExecuteBatchDmlResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
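-            # seqno is required on ExecuteBatchDmlRequest: it sequences DML
-            # requests within a transaction so that replays can be detected,
-            # which is why the test seeds it alongside session before
-            # exercising the required-field machinery.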
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner.ExecuteBatchDmlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.execute_batch_dml(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_execute_batch_dml_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.execute_batch_dml._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "transaction", "statements", "seqno", ))) - - -def test_read_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.read] = mock_rpc - - request = {} - client.read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_read_rest_required_fields(request_type=spanner.ReadRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["table"] = "" - request_init["columns"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["table"] = 'table_value' - jsonified_request["columns"] = 'columns_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "table" in jsonified_request - assert jsonified_request["table"] == 'table_value' - assert "columns" in jsonified_request - assert jsonified_request["columns"] == 'columns_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.ResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
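-            # ReadRequest also requires key_set, but as a message field it
-            # has no scalar default to seed here; the stubbed transcode()
-            # lets the test populate only the string fields session, table
-            # and columns.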
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.ResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.read(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_read_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.read._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", ))) - - -def test_streaming_read_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.streaming_read in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.streaming_read] = mock_rpc - - request = {} - client.streaming_read(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.streaming_read(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_streaming_read_rest_required_fields(request_type=spanner.ReadRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["table"] = "" - request_init["columns"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["table"] = 'table_value' - jsonified_request["columns"] = 'columns_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).streaming_read._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "table" in jsonified_request - assert jsonified_request["table"] == 'table_value' - assert "columns" in jsonified_request - assert jsonified_request["columns"] == 'columns_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = result_set.PartialResultSet() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = result_set.PartialResultSet.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - json_return_value = "[{}]".format(json_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.streaming_read(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_streaming_read_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.streaming_read._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "table", "columns", "keySet", ))) - - -def test_begin_transaction_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.begin_transaction in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.begin_transaction] = mock_rpc - - request = {} - client.begin_transaction(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.begin_transaction(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_begin_transaction_rest_required_fields(request_type=spanner.BeginTransactionRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).begin_transaction._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = transaction.Transaction() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.begin_transaction(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_begin_transaction_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.begin_transaction._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "options", ))) - - -def test_begin_transaction_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
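-    # The REST transport funnels every call through its underlying HTTP
-    # session's request() method; patching that attribute on the session's
-    # type intercepts the call regardless of how the bound method is looked
-    # up, and the patch is undone when the with-block exits.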
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transaction.Transaction() - - # get arguments that satisfy an http rule for this method - sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = transaction.Transaction.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.begin_transaction(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:beginTransaction" % client.transport._host, args[1]) - - -def test_begin_transaction_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.begin_transaction( - spanner.BeginTransactionRequest(), - session='session_value', - options=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - -def test_commit_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.commit in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.commit] = mock_rpc - - request = {} - client.commit(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.commit(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_commit_rest_required_fields(request_type=spanner.CommitRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).commit._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = commit_response.CommitResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.commit(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_commit_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.commit._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", ))) - - -def test_commit_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
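-        # CommitRequest expresses its transaction choice as a oneof: either
-        # transaction_id (commit an existing transaction) or
-        # single_use_transaction (options for a one-shot commit), alongside
-        # the mutations to apply. The flattened-error case further below
-        # deliberately supplies both, on top of mixing in a request object.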
- return_value = commit_response.CommitResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - session='session_value', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = commit_response.CommitResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.commit(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:commit" % client.transport._host, args[1]) - - -def test_commit_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.commit( - spanner.CommitRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))], - single_use_transaction=transaction.TransactionOptions(read_write=transaction.TransactionOptions.ReadWrite(read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC)), - ) - - -def test_rollback_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rollback in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rollback] = mock_rpc - - request = {} - client.rollback(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rollback(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rollback_rest_required_fields(request_type=spanner.RollbackRequest): - transport_class = transports.SpannerRestTransport - - request_init = {} - request_init["session"] = "" - request_init["transaction_id"] = b'' - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["session"] = 'session_value' - jsonified_request["transactionId"] = b'transaction_id_blob' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rollback._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "session" in jsonified_request - assert jsonified_request["session"] == 'session_value' - assert "transactionId" in jsonified_request - assert jsonified_request["transactionId"] == b'transaction_id_blob' - - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rollback(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rollback_rest_unset_required_fields(): - transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rollback._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("session", "transactionId", ))) - - -def test_rollback_rest_flattened(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - session='session_value', - transaction_id=b'transaction_id_blob', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rollback(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:rollback" % client.transport._host, args[1]) - - -def test_rollback_rest_flattened_error(transport: str = 'rest'): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rollback( - spanner.RollbackRequest(), - session='session_value', - transaction_id=b'transaction_id_blob', - ) - - -def test_partition_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.partition_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.partition_query] = mock_rpc - - request = {} - client.partition_query(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.partition_query(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_partition_query_rest_required_fields(request_type=spanner.PartitionQueryRequest):
-    transport_class = transports.SpannerRestTransport
-
-    request_init = {}
-    request_init["session"] = ""
-    request_init["sql"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["session"] = 'session_value'
-    jsonified_request["sql"] = 'sql_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_query._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "session" in jsonified_request
-    assert jsonified_request["session"] == 'session_value'
-    assert "sql" in jsonified_request
-    assert jsonified_request["sql"] == 'sql_value'
-
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = spanner.PartitionResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = spanner.PartitionResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.partition_query(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_partition_query_rest_unset_required_fields():
-    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.partition_query._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("session", "sql", )))
-
-
-def test_partition_read_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = SpannerClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.partition_read in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.partition_read] = mock_rpc
-
-        request = {}
-        client.partition_read(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.partition_read(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_partition_read_rest_required_fields(request_type=spanner.PartitionReadRequest):
-    transport_class = transports.SpannerRestTransport
-
-    request_init = {}
-    request_init["session"] = ""
-    request_init["table"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["session"] = 'session_value'
-    jsonified_request["table"] = 'table_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).partition_read._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "session" in jsonified_request
-    assert jsonified_request["session"] == 'session_value'
-    assert "table" in jsonified_request
-    assert jsonified_request["table"] == 'table_value'
-
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = spanner.PartitionResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = spanner.PartitionResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.partition_read(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_partition_read_rest_unset_required_fields():
-    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.partition_read._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("session", "table", "keySet", )))
-
-
-def test_batch_write_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = SpannerClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.batch_write in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.batch_write] = mock_rpc
-
-        request = {}
-        client.batch_write(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.batch_write(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_batch_write_rest_required_fields(request_type=spanner.BatchWriteRequest):
-    transport_class = transports.SpannerRestTransport
-
-    request_init = {}
-    request_init["session"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["session"] = 'session_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_write._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "session" in jsonified_request
-    assert jsonified_request["session"] == 'session_value'
-
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = spanner.BatchWriteResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = spanner.BatchWriteResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-            json_return_value = "[{}]".format(json_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            with mock.patch.object(response_value, 'iter_content') as iter_content:
-                iter_content.return_value = iter(json_return_value)
-                response = client.batch_write(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_batch_write_rest_unset_required_fields():
-    transport = transports.SpannerRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.batch_write._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("session", "mutationGroups", )))
-
-
-def test_batch_write_rest_flattened():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.BatchWriteResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            session='session_value',
-            mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = spanner.BatchWriteResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        json_return_value = "[{}]".format(json_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        with mock.patch.object(response_value, 'iter_content') as iter_content:
-            iter_content.return_value = iter(json_return_value)
-            client.batch_write(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{session=projects/*/instances/*/databases/*/sessions/*}:batchWrite" % client.transport._host, args[1])
-
-
-def test_batch_write_rest_flattened_error(transport: str = 'rest'):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.batch_write(
-            spanner.BatchWriteRequest(),
-            session='session_value',
-            mutation_groups=[spanner.BatchWriteRequest.MutationGroup(mutations=[mutation.Mutation(insert=mutation.Mutation.Write(table='table_value'))])],
-        )
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.SpannerGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = SpannerClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.SpannerGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = SpannerClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.SpannerGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = SpannerClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = SpannerClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.SpannerGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = SpannerClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.SpannerGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = SpannerClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.SpannerGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-    transport = transports.SpannerGrpcAsyncIOTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-@pytest.mark.parametrize("transport_class", [
-    transports.SpannerGrpcTransport,
-    transports.SpannerGrpcAsyncIOTransport,
-    transports.SpannerRestTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
-
-def test_transport_kind_grpc():
-    transport = SpannerClient.get_transport_class("grpc")(
-        credentials=ga_credentials.AnonymousCredentials()
-    )
-    assert transport.kind == "grpc"
-
-
-def test_initialize_client_w_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_session_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_session),
-            '__call__') as call:
-        call.return_value = spanner.Session()
-        client.create_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.CreateSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_batch_create_sessions_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_create_sessions),
-            '__call__') as call:
-        call.return_value = spanner.BatchCreateSessionsResponse()
-        client.batch_create_sessions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BatchCreateSessionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_session_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_session),
-            '__call__') as call:
-        call.return_value = spanner.Session()
-        client.get_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.GetSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_sessions_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        call.return_value = spanner.ListSessionsResponse()
-        client.list_sessions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ListSessionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_session_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_session),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.DeleteSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_execute_sql_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_sql),
-            '__call__') as call:
-        call.return_value = result_set.ResultSet()
-        client.execute_sql(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteSqlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_execute_streaming_sql_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_streaming_sql),
-            '__call__') as call:
-        call.return_value = iter([result_set.PartialResultSet()])
-        client.execute_streaming_sql(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteSqlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_execute_batch_dml_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_batch_dml),
-            '__call__') as call:
-        call.return_value = spanner.ExecuteBatchDmlResponse()
-        client.execute_batch_dml(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteBatchDmlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_read_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.read),
-            '__call__') as call:
-        call.return_value = result_set.ResultSet()
-        client.read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_streaming_read_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.streaming_read),
-            '__call__') as call:
-        call.return_value = iter([result_set.PartialResultSet()])
-        client.streaming_read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_begin_transaction_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.begin_transaction),
-            '__call__') as call:
-        call.return_value = transaction.Transaction()
-        client.begin_transaction(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BeginTransactionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_commit_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.commit),
-            '__call__') as call:
-        call.return_value = commit_response.CommitResponse()
-        client.commit(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.CommitRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_rollback_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rollback),
-            '__call__') as call:
-        call.return_value = None
-        client.rollback(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.RollbackRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_partition_query_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_query),
-            '__call__') as call:
-        call.return_value = spanner.PartitionResponse()
-        client.partition_query(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.PartitionQueryRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_partition_read_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_read),
-            '__call__') as call:
-        call.return_value = spanner.PartitionResponse()
-        client.partition_read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.PartitionReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_batch_write_empty_call_grpc():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_write),
-            '__call__') as call:
-        call.return_value = iter([spanner.BatchWriteResponse()])
-        client.batch_write(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BatchWriteRequest()
-
-        assert args[0] == request_msg
-
-
-def test_transport_kind_grpc_asyncio():
-    transport = SpannerAsyncClient.get_transport_class("grpc_asyncio")(
-        credentials=async_anonymous_credentials()
-    )
-    assert transport.kind == "grpc_asyncio"
-
-
-def test_initialize_client_w_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_create_session_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_session),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session(
-            name='name_value',
-            creator_role='creator_role_value',
-            multiplexed=True,
-        ))
-        await client.create_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.CreateSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_batch_create_sessions_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_create_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.BatchCreateSessionsResponse(
-        ))
-        await client.batch_create_sessions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BatchCreateSessionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_get_session_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_session),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.Session(
-            name='name_value',
-            creator_role='creator_role_value',
-            multiplexed=True,
-        ))
-        await client.get_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.GetSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_list_sessions_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_sessions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ListSessionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        await client.list_sessions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ListSessionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_delete_session_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_session),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.DeleteSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_execute_sql_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_sql),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
-        ))
-        await client.execute_sql(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteSqlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_execute_streaming_sql_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_streaming_sql),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
-        call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()])
-        await client.execute_streaming_sql(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteSqlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_execute_batch_dml_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.execute_batch_dml),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.ExecuteBatchDmlResponse(
-        ))
-        await client.execute_batch_dml(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteBatchDmlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_read_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.read),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(result_set.ResultSet(
-        ))
-        await client.read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_streaming_read_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.streaming_read),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
-        call.return_value.read = mock.AsyncMock(side_effect=[result_set.PartialResultSet()])
-        await client.streaming_read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_begin_transaction_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.begin_transaction),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transaction.Transaction(
-            id=b'id_blob',
-        ))
-        await client.begin_transaction(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BeginTransactionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_commit_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.commit),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(commit_response.CommitResponse(
-        ))
-        await client.commit(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.CommitRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_rollback_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rollback),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.rollback(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.RollbackRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_partition_query_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse(
-        ))
-        await client.partition_query(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.PartitionQueryRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_partition_read_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.partition_read),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner.PartitionResponse(
-        ))
-        await client.partition_read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.PartitionReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-@pytest.mark.asyncio
-async def test_batch_write_empty_call_grpc_asyncio():
-    client = SpannerAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_write),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True)
-        call.return_value.read = mock.AsyncMock(side_effect=[spanner.BatchWriteResponse()])
-        await client.batch_write(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BatchWriteRequest()
-
-        assert args[0] == request_msg
-
-
-def test_transport_kind_rest():
-    transport = SpannerClient.get_transport_class("rest")(
-        credentials=ga_credentials.AnonymousCredentials()
-    )
-    assert transport.kind == "rest"
-
-
-def test_create_session_rest_bad_request(request_type=spanner.CreateSessionRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.create_session(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.CreateSessionRequest,
-    dict,
-])
-def test_create_session_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.Session(
-            name='name_value',
-            creator_role='creator_role_value',
-            multiplexed=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.Session.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_session(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner.Session)
-    assert response.name == 'name_value'
-    assert response.creator_role == 'creator_role_value'
-    assert response.multiplexed is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_session_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-        )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_create_session") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_create_session_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_create_session") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.CreateSessionRequest.pb(spanner.CreateSessionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.Session.to_json(spanner.Session())
-        req.return_value.content = return_value
-
-        request = spanner.CreateSessionRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.Session()
-        post_with_metadata.return_value = spanner.Session(), metadata
-
-        client.create_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_batch_create_sessions_rest_bad_request(request_type=spanner.BatchCreateSessionsRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.batch_create_sessions(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.BatchCreateSessionsRequest,
-    dict,
-])
-def test_batch_create_sessions_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.BatchCreateSessionsResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.BatchCreateSessionsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.batch_create_sessions(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner.BatchCreateSessionsResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_batch_create_sessions_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-        )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_batch_create_sessions_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_create_sessions") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.BatchCreateSessionsRequest.pb(spanner.BatchCreateSessionsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.BatchCreateSessionsResponse.to_json(spanner.BatchCreateSessionsResponse())
-        req.return_value.content = return_value
-
-        request = spanner.BatchCreateSessionsRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.BatchCreateSessionsResponse()
-        post_with_metadata.return_value = spanner.BatchCreateSessionsResponse(), metadata
-
-        client.batch_create_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_session_rest_bad_request(request_type=spanner.GetSessionRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_session(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.GetSessionRequest,
-    dict,
-])
-def test_get_session_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.Session(
-            name='name_value',
-            creator_role='creator_role_value',
-            multiplexed=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.Session.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_session(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner.Session)
-    assert response.name == 'name_value'
-    assert response.creator_role == 'creator_role_value'
-    assert response.multiplexed is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_session_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-        )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_get_session") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_get_session_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_get_session") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.GetSessionRequest.pb(spanner.GetSessionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.Session.to_json(spanner.Session())
-        req.return_value.content = return_value
-
-        request = spanner.GetSessionRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.Session()
-        post_with_metadata.return_value = spanner.Session(), metadata
-
-        client.get_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_sessions_rest_bad_request(request_type=spanner.ListSessionsRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_sessions(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.ListSessionsRequest,
-    dict,
-])
-def test_list_sessions_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-def test_list_sessions_rest_bad_request(request_type=spanner.ListSessionsRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_sessions(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.ListSessionsRequest,
-    dict,
-])
-def test_list_sessions_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.ListSessionsResponse(
-            next_page_token='next_page_token_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.ListSessionsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_sessions(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListSessionsPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_sessions_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_list_sessions_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_list_sessions") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.ListSessionsRequest.pb(spanner.ListSessionsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.ListSessionsResponse.to_json(spanner.ListSessionsResponse())
-        req.return_value.content = return_value
-
-        request = spanner.ListSessionsRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.ListSessionsResponse()
-        post_with_metadata.return_value = spanner.ListSessionsResponse(), metadata
-
-        client.list_sessions(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_session_rest_bad_request(request_type=spanner.DeleteSessionRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_session(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.DeleteSessionRequest,
-    dict,
-])
-def test_delete_session_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_session(request)
-
-        # Establish that the response is the type that we expect.
-        assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_session_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_delete_session") as pre:
-        pre.assert_not_called()
-        pb_message = spanner.DeleteSessionRequest.pb(spanner.DeleteSessionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = spanner.DeleteSessionRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.delete_session(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_execute_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.execute_sql(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.ExecuteSqlRequest,
-    dict,
-])
-def test_execute_sql_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = result_set.ResultSet(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = result_set.ResultSet.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.execute_sql(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, result_set.ResultSet)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_execute_sql_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_sql_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_sql") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = result_set.ResultSet.to_json(result_set.ResultSet())
-        req.return_value.content = return_value
-
-        request = spanner.ExecuteSqlRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = result_set.ResultSet()
-        post_with_metadata.return_value = result_set.ResultSet(), metadata
-
-        client.execute_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_execute_streaming_sql_rest_bad_request(request_type=spanner.ExecuteSqlRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.execute_streaming_sql(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.ExecuteSqlRequest,
-    dict,
-])
-def test_execute_streaming_sql_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = result_set.PartialResultSet(
-            chunked_value=True,
-            resume_token=b'resume_token_blob',
-            last=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = result_set.PartialResultSet.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        json_return_value = "[{}]".format(json_return_value)
-        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.execute_streaming_sql(request)
-
-        assert isinstance(response, Iterable)
-        response = next(response)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, result_set.PartialResultSet)
-        assert response.chunked_value is True
-        assert response.resume_token == b'resume_token_blob'
-        assert response.last is True
-
-
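`execute_streaming_sql` returns an iterable of `PartialResultSet` messages, which is why the test drains it with `next()`. A sketch of consuming such a stream, using only fields asserted above (`resume_token`, `last`); the `drain` helper is hypothetical:

    def drain(client, request):
        resume_token = b""
        for partial in client.execute_streaming_sql(request):
            if partial.resume_token:
                # Checkpoint that could be replayed after a broken stream.
                resume_token = partial.resume_token
            if partial.last:
                break  # server marked this chunk as the final one
        return resume_token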
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_execute_streaming_sql_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_streaming_sql_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_streaming_sql") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.ExecuteSqlRequest.pb(spanner.ExecuteSqlRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet())
-        req.return_value.iter_content = mock.Mock(return_value=iter(return_value))
-
-        request = spanner.ExecuteSqlRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = result_set.PartialResultSet()
-        post_with_metadata.return_value = result_set.PartialResultSet(), metadata
-
-        client.execute_streaming_sql(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_execute_batch_dml_rest_bad_request(request_type=spanner.ExecuteBatchDmlRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.execute_batch_dml(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.ExecuteBatchDmlRequest,
-    dict,
-])
-def test_execute_batch_dml_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.ExecuteBatchDmlResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.ExecuteBatchDmlResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.execute_batch_dml(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, spanner.ExecuteBatchDmlResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_execute_batch_dml_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_execute_batch_dml_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_execute_batch_dml") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.ExecuteBatchDmlRequest.pb(spanner.ExecuteBatchDmlRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.ExecuteBatchDmlResponse.to_json(spanner.ExecuteBatchDmlResponse())
-        req.return_value.content = return_value
-
-        request = spanner.ExecuteBatchDmlRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.ExecuteBatchDmlResponse()
-        post_with_metadata.return_value = spanner.ExecuteBatchDmlResponse(), metadata
-
-        client.execute_batch_dml(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_read_rest_bad_request(request_type=spanner.ReadRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.read(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.ReadRequest,
-    dict,
-])
-def test_read_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = result_set.ResultSet(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = result_set.ResultSet.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.read(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, result_set.ResultSet)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_read_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_read") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_read_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_read") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.ReadRequest.pb(spanner.ReadRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = result_set.ResultSet.to_json(result_set.ResultSet())
-        req.return_value.content = return_value
-
-        request = spanner.ReadRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = result_set.ResultSet()
-        post_with_metadata.return_value = result_set.ResultSet(), metadata
-
-        client.read(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_streaming_read_rest_bad_request(request_type=spanner.ReadRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.streaming_read(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.ReadRequest,
-    dict,
-])
-def test_streaming_read_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = result_set.PartialResultSet(
-            chunked_value=True,
-            resume_token=b'resume_token_blob',
-            last=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = result_set.PartialResultSet.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        json_return_value = "[{}]".format(json_return_value)
-        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.streaming_read(request)
-
-        assert isinstance(response, Iterable)
-        response = next(response)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, result_set.PartialResultSet)
-        assert response.chunked_value is True
-        assert response.resume_token == b'resume_token_blob'
-        assert response.last is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_streaming_read_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_streaming_read_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_streaming_read") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.ReadRequest.pb(spanner.ReadRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = result_set.PartialResultSet.to_json(result_set.PartialResultSet())
-        req.return_value.iter_content = mock.Mock(return_value=iter(return_value))
-
-        request = spanner.ReadRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = result_set.PartialResultSet()
-        post_with_metadata.return_value = result_set.PartialResultSet(), metadata
-
-        client.streaming_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_begin_transaction_rest_bad_request(request_type=spanner.BeginTransactionRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.begin_transaction(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.BeginTransactionRequest,
-    dict,
-])
-def test_begin_transaction_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = transaction.Transaction(
-            id=b'id_blob',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = transaction.Transaction.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.begin_transaction(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, transaction.Transaction)
-        assert response.id == b'id_blob'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_begin_transaction_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_begin_transaction_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_begin_transaction") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.BeginTransactionRequest.pb(spanner.BeginTransactionRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = transaction.Transaction.to_json(transaction.Transaction())
-        req.return_value.content = return_value
-
-        request = spanner.BeginTransactionRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = transaction.Transaction()
-        post_with_metadata.return_value = transaction.Transaction(), metadata
-
-        client.begin_transaction(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_commit_rest_bad_request(request_type=spanner.CommitRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.commit(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.CommitRequest,
-    dict,
-])
-def test_commit_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = commit_response.CommitResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = commit_response.CommitResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.commit(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, commit_response.CommitResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_commit_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_commit") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_commit_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_commit") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.CommitRequest.pb(spanner.CommitRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = commit_response.CommitResponse.to_json(commit_response.CommitResponse())
-        req.return_value.content = return_value
-
-        request = spanner.CommitRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = commit_response.CommitResponse()
-        post_with_metadata.return_value = commit_response.CommitResponse(), metadata
-
-        client.commit(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
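Each interceptor test also passes `metadata=[("key", "val"), ("cephalopod", "squid"),]`, the per-call metadata that GAPIC clients forward alongside the request. A one-line sketch; the header key and value are placeholders, not a documented header:

    response = client.commit(request, metadata=[("x-custom-header", "trace-42")])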
-def test_rollback_rest_bad_request(request_type=spanner.RollbackRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.rollback(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.RollbackRequest,
-    dict,
-])
-def test_rollback_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.rollback(request)
-
-        # Establish that the response is the type that we expect.
-        assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_rollback_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_rollback") as pre:
-        pre.assert_not_called()
-        pb_message = spanner.RollbackRequest.pb(spanner.RollbackRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = spanner.RollbackRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.rollback(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_partition_query_rest_bad_request(request_type=spanner.PartitionQueryRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.partition_query(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.PartitionQueryRequest,
-    dict,
-])
-def test_partition_query_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.PartitionResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.PartitionResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.partition_query(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, spanner.PartitionResponse)
-
-
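`partition_query` returns a `PartitionResponse` whose `partitions` each carry a `partition_token` that can be fed back through `execute_streaming_sql`. A hedged sketch of that fan-out; it omits the read-only transaction selector a real partitioned query typically also sets, and `run_partitioned` plus the session/sql values are placeholders:

    from google.cloud.spanner_v1 import ExecuteSqlRequest, PartitionQueryRequest

    def run_partitioned(client, session, sql):
        resp = client.partition_query(
            PartitionQueryRequest(session=session, sql=sql))
        for part in resp.partitions:
            # Each token can be executed independently, even on another worker.
            yield from client.execute_streaming_sql(
                ExecuteSqlRequest(session=session, sql=sql,
                                  partition_token=part.partition_token))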
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_partition_query_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_partition_query_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_query") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.PartitionQueryRequest.pb(spanner.PartitionQueryRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse())
-        req.return_value.content = return_value
-
-        request = spanner.PartitionQueryRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.PartitionResponse()
-        post_with_metadata.return_value = spanner.PartitionResponse(), metadata
-
-        client.partition_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_partition_read_rest_bad_request(request_type=spanner.PartitionReadRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.partition_read(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.PartitionReadRequest,
-    dict,
-])
-def test_partition_read_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.PartitionResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.PartitionResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.partition_read(request)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, spanner.PartitionResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_partition_read_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_partition_read_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_partition_read") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.PartitionReadRequest.pb(spanner.PartitionReadRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.PartitionResponse.to_json(spanner.PartitionResponse())
-        req.return_value.content = return_value
-
-        request = spanner.PartitionReadRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.PartitionResponse()
-        post_with_metadata.return_value = spanner.PartitionResponse(), metadata
-
-        client.partition_read(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_batch_write_rest_bad_request(request_type=spanner.BatchWriteRequest):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.batch_write(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner.BatchWriteRequest,
-    dict,
-])
-def test_batch_write_rest_call_success(request_type):
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'session': 'projects/sample1/instances/sample2/databases/sample3/sessions/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = spanner.BatchWriteResponse(
-            indexes=[752],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = spanner.BatchWriteResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        json_return_value = "[{}]".format(json_return_value)
-        response_value.iter_content = mock.Mock(return_value=iter(json_return_value))
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.batch_write(request)
-
-        assert isinstance(response, Iterable)
-        response = next(response)
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, spanner.BatchWriteResponse)
-        assert response.indexes == [752]
-
-
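`batch_write` is also server-streaming: each `BatchWriteResponse` reports on a subset of the request's mutation groups, with `indexes` (asserted above) pointing back into the request. A short sketch; `handle` is a placeholder for application logic:

    for resp in client.batch_write(request):
        # resp.status carries the per-group result as a google.rpc.Status.
        handle(resp.indexes, resp.status)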
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_batch_write_rest_interceptors(null_interceptor):
-    transport = transports.SpannerRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.SpannerRestInterceptor(),
-    )
-    client = SpannerClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write") as post, \
-        mock.patch.object(transports.SpannerRestInterceptor, "post_batch_write_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.SpannerRestInterceptor, "pre_batch_write") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner.BatchWriteRequest.pb(spanner.BatchWriteRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = spanner.BatchWriteResponse.to_json(spanner.BatchWriteResponse())
-        req.return_value.iter_content = mock.Mock(return_value=iter(return_value))
-
-        request = spanner.BatchWriteRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = spanner.BatchWriteResponse()
-        post_with_metadata.return_value = spanner.BatchWriteResponse(), metadata
-
-        client.batch_write(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-def test_initialize_client_w_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_session_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.create_session),
-        '__call__') as call:
-        client.create_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.CreateSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_batch_create_sessions_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.batch_create_sessions),
-        '__call__') as call:
-        client.batch_create_sessions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BatchCreateSessionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_session_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.get_session),
-        '__call__') as call:
-        client.get_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.GetSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_sessions_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_sessions),
-        '__call__') as call:
-        client.list_sessions(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ListSessionsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_session_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_session),
-        '__call__') as call:
-        client.delete_session(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.DeleteSessionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_execute_sql_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.execute_sql),
-        '__call__') as call:
-        client.execute_sql(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteSqlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_execute_streaming_sql_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.execute_streaming_sql),
-        '__call__') as call:
-        client.execute_streaming_sql(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteSqlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_execute_batch_dml_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.execute_batch_dml),
-        '__call__') as call:
-        client.execute_batch_dml(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ExecuteBatchDmlRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_read_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.read),
-        '__call__') as call:
-        client.read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_streaming_read_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.streaming_read),
-        '__call__') as call:
-        client.streaming_read(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.ReadRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_begin_transaction_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.begin_transaction),
-        '__call__') as call:
-        client.begin_transaction(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.BeginTransactionRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_commit_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.commit),
-        '__call__') as call:
-        client.commit(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = spanner.CommitRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_rollback_empty_call_rest():
-    client = SpannerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
- with mock.patch.object( - type(client.transport.rollback), - '__call__') as call: - client.rollback(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.RollbackRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_query_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_query), - '__call__') as call: - client.partition_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_partition_read_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.partition_read), - '__call__') as call: - client.partition_read(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.PartitionReadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_write_empty_call_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_write), - '__call__') as call: - client.batch_write(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner.BatchWriteRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SpannerGrpcTransport, - ) - -def test_spanner_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SpannerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_spanner_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.SpannerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_session', - 'batch_create_sessions', - 'get_session', - 'list_sessions', - 'delete_session', - 'execute_sql', - 'execute_streaming_sql', - 'execute_batch_dml', - 'read', - 'streaming_read', - 'begin_transaction', - 'commit', - 'rollback', - 'partition_query', - 'partition_read', - 'batch_write', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_spanner_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SpannerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - quota_project_id="octopus", - ) - - -def test_spanner_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_v1.services.spanner.transports.SpannerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SpannerTransport() - adc.assert_called_once() - - -def test_spanner_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SpannerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SpannerGrpcTransport, - transports.SpannerGrpcAsyncIOTransport, - ], -) -def test_spanner_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.data',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SpannerGrpcTransport, - transports.SpannerGrpcAsyncIOTransport, - transports.SpannerRestTransport, - ], -) -def test_spanner_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.SpannerGrpcTransport, grpc_helpers), - (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_spanner_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.data', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_spanner_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.SpannerRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_spanner_host_no_port(transport_name): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_spanner_host_with_port(transport_name): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_spanner_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = SpannerClient( - credentials=creds1, - transport=transport_name, - ) - client2 = SpannerClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_session._session - session2 = client2.transport.create_session._session - assert session1 != session2 - session1 = client1.transport.batch_create_sessions._session - session2 = client2.transport.batch_create_sessions._session - assert session1 != session2 - session1 = client1.transport.get_session._session - session2 = client2.transport.get_session._session - assert session1 != session2 - session1 = client1.transport.list_sessions._session - session2 = client2.transport.list_sessions._session - assert session1 != session2 - session1 = client1.transport.delete_session._session - session2 = client2.transport.delete_session._session - assert session1 != session2 - session1 = client1.transport.execute_sql._session - session2 = client2.transport.execute_sql._session - assert session1 != session2 - session1 = client1.transport.execute_streaming_sql._session - session2 = client2.transport.execute_streaming_sql._session - assert session1 != session2 - session1 = client1.transport.execute_batch_dml._session - session2 = client2.transport.execute_batch_dml._session - assert session1 != session2 - session1 = client1.transport.read._session - session2 = client2.transport.read._session - assert session1 != session2 - session1 = 
client1.transport.streaming_read._session - session2 = client2.transport.streaming_read._session - assert session1 != session2 - session1 = client1.transport.begin_transaction._session - session2 = client2.transport.begin_transaction._session - assert session1 != session2 - session1 = client1.transport.commit._session - session2 = client2.transport.commit._session - assert session1 != session2 - session1 = client1.transport.rollback._session - session2 = client2.transport.rollback._session - assert session1 != session2 - session1 = client1.transport.partition_query._session - session2 = client2.transport.partition_query._session - assert session1 != session2 - session1 = client1.transport.partition_read._session - session2 = client2.transport.partition_read._session - assert session1 != session2 - session1 = client1.transport.batch_write._session - session2 = client2.transport.batch_write._session - assert session1 != session2 -def test_spanner_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SpannerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_spanner_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SpannerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport]) -def test_spanner_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_database_path(): - project = "squid" - instance = "clam" - database = "whelk" - expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - actual = SpannerClient.database_path(project, instance, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "octopus", - "instance": "oyster", - "database": "nudibranch", - } - path = SpannerClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_database_path(path) - assert expected == actual - -def test_session_path(): - project = "cuttlefish" - instance = "mussel" - database = "winkle" - session = "nautilus" - expected = "projects/{project}/instances/{instance}/databases/{database}/sessions/{session}".format(project=project, instance=instance, database=database, session=session, ) - actual = SpannerClient.session_path(project, instance, database, session) - assert expected == actual - - -def test_parse_session_path(): - expected = { - "project": "scallop", - "instance": "abalone", - "database": "squid", - "session": "clam", - } - path = SpannerClient.session_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_session_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = SpannerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = SpannerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = SpannerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = SpannerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = SpannerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = SpannerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = SpannerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = SpannerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = SpannerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = SpannerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = SpannerClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = SpannerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.SpannerTransport, '_prep_wrapped_messages') as prep: - transport_class = SpannerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = SpannerAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = SpannerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (SpannerClient, transports.SpannerGrpcTransport), - (SpannerAsyncClient, transports.SpannerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/spanner_admin_database/v1/.coveragerc b/owl-bot-staging/spanner_admin_database/v1/.coveragerc deleted file mode 100644 index 6085d54da4..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner_admin_database/__init__.py - google/cloud/spanner_admin_database/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/spanner_admin_database/v1/.flake8 b/owl-bot-staging/spanner_admin_database/v1/.flake8 deleted file mode 100644 index 90316de214..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/spanner_admin_database/v1/LICENSE b/owl-bot-staging/spanner_admin_database/v1/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
deleted file mode 100644
index dae249ec89..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/MANIFEST.in
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-include README.rst LICENSE
-recursive-include google *.py *.pyi *.json *.proto py.typed
-recursive-include tests *
-global-exclude *.py[co]
-global-exclude __pycache__
diff --git a/owl-bot-staging/spanner_admin_database/v1/README.rst b/owl-bot-staging/spanner_admin_database/v1/README.rst
deleted file mode 100644
index 5cfc4ab969..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Spanner Admin Database API
-==========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Spanner Admin Database API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard :code:`logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the :code:`google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
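The propagation behavior in the details above is the point most likely to surprise users. As a minimal sketch, using only the standard :code:`logging` calls already named in this section (the scope shown is illustrative), a code-based configuration that also opts in to root-logger propagation looks like:

.. code-block:: python

    import logging

    # Handle DEBUG-and-above events from all Google-based loggers.
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # Events stop at the "google" logger by default; set propagate explicitly
    # if the root logger's handlers should also receive them.
    base_logger.propagate = True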
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css
deleted file mode 100644
index b0a295464b..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/docs/_static/custom.css
+++ /dev/null
@@ -1,20 +0,0 @@
-div#python2-eol {
-    border-color: red;
-    border-width: medium;
-}
-
-/* Ensure minimum width for 'Parameters' / 'Returns' column */
-dl.field-list > dt {
-    min-width: 100px
-}
-
-/* Insert space between methods for readability */
-dl.method {
-    padding-top: 10px;
-    padding-bottom: 10px
-}
-
-/* Insert empty space between classes */
-dl.class {
-    padding-bottom: 50px
-}
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html
deleted file mode 100644
index 95e9c77fcf..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/docs/_templates/layout.html
+++ /dev/null
@@ -1,50 +0,0 @@
-
-{% extends "!layout.html" %}
-{%- block content %}
-{%- if theme_fixed_sidebar|lower == 'true' %}
-  <div class="document">
-    {{ sidebar() }}
-    {%- block document %}
-      <div class="documentwrapper">
-      {%- if render_sidebar %}
-        <div class="bodywrapper">
-      {%- endif %}
-
-        {%- block relbar_top %}
-          {%- if theme_show_relbar_top|tobool %}
-            <div class="related top">&nbsp;{{- rellink_markup () }}</div>
-          {%- endif %}
-        {% endblock %}
-
-        <div class="body" role="main">
-          <div id="python2-eol" class="admonition admonition-python2-eol">
-            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
-            Library versions released prior to that date will continue to be available. For more information please
-            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
-          </div>
-          {% block body %} {% endblock %}
-        </div>
-
-        {%- block relbar_bottom %}
-          {%- if theme_show_relbar_bottom|tobool %}
-            <div class="related bottom">&nbsp;{{- rellink_markup () }}</div>
-          {%- endif %}
-        {% endblock %}
-
-      {%- if render_sidebar %}
-        </div>
-      {%- endif %}
-      </div>
-    {%- endblock %}
-  </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py b/owl-bot-staging/spanner_admin_database/v1/docs/conf.py deleted file mode 100644 index 4b16cc5e97..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-spanner-admin-database documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-spanner-admin-database" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-database", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-spanner-admin-database-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-spanner-admin-database.tex", - u"google-cloud-spanner-admin-database Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (
-        root_doc,
-        "google-cloud-spanner-admin-database",
-        "google-cloud-spanner-admin-database Documentation",
-        [author],
-        1,
-    )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
-    (
-        root_doc,
-        "google-cloud-spanner-admin-database",
-        "google-cloud-spanner-admin-database Documentation",
-        author,
-        "google-cloud-spanner-admin-database",
-        "google-cloud-spanner-admin-database Library",
-        "APIs",
-    )
-]
-
-# Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-# texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-# texinfo_no_detailmenu = False
-
-
-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {
-    "python": ("https://python.readthedocs.org/en/latest/", None),
-    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
-    "google.api_core": (
-        "https://googleapis.dev/python/google-api-core/latest/",
-        None,
-    ),
-    "grpc": ("https://grpc.github.io/grpc/python/", None),
-    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
-    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
-}
-
-
-# Napoleon settings
-napoleon_google_docstring = True
-napoleon_numpy_docstring = True
-napoleon_include_private_with_doc = False
-napoleon_include_special_with_doc = True
-napoleon_use_admonition_for_examples = False
-napoleon_use_admonition_for_notes = False
-napoleon_use_admonition_for_references = False
-napoleon_use_ivar = False
-napoleon_use_param = True
-napoleon_use_rtype = True
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst b/owl-bot-staging/spanner_admin_database/v1/docs/index.rst
deleted file mode 100644
index 6f79e3f3d4..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/docs/index.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-.. include:: multiprocessing.rst
-
-
-API Reference
--------------
-.. toctree::
-    :maxdepth: 2
-
-    spanner_admin_database_v1/services_
-    spanner_admin_database_v1/types_
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst
deleted file mode 100644
index 536d17b2ea..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/docs/multiprocessing.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-.. note::
-
-   Because this client uses the :mod:`grpc` library, it is safe to
-   share instances across threads. In multiprocessing scenarios, the best
-   practice is to create client instances *after* the invocation of
-   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
-   :class:`multiprocessing.Process`.
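A minimal sketch of the fork-safe pattern the note above describes, assuming Application Default Credentials are available and with a placeholder worker body, creates the client inside each child process rather than before the pool forks:

.. code-block:: python

    import multiprocessing

    from google.cloud import spanner_admin_database_v1


    def worker(_):
        # Create the client *after* os.fork, inside the child process,
        # as the note above recommends.
        client = spanner_admin_database_v1.DatabaseAdminClient()
        # ... issue DatabaseAdmin RPCs with `client` here ...


    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as pool:
            pool.map(worker, range(2))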
diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst deleted file mode 100644 index bd6aab00e4..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/database_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -DatabaseAdmin -------------------------------- - -.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_admin_database_v1.services.database_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst deleted file mode 100644 index 55e57d8dc0..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner Admin Database v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - database_admin diff --git a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst b/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst deleted file mode 100644 index fe6c27778b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/docs/spanner_admin_database_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Spanner Admin Database v1 API -==================================================== - -.. automodule:: google.cloud.spanner_admin_database_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py deleted file mode 100644 index aa445b5622..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/__init__.py +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner_admin_database import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.spanner_admin_database_v1.services.database_admin.client import DatabaseAdminClient -from google.cloud.spanner_admin_database_v1.services.database_admin.async_client import DatabaseAdminAsyncClient - -from google.cloud.spanner_admin_database_v1.types.backup import Backup -from google.cloud.spanner_admin_database_v1.types.backup import BackupInfo -from google.cloud.spanner_admin_database_v1.types.backup import BackupInstancePartition -from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupMetadata -from google.cloud.spanner_admin_database_v1.types.backup import CopyBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupMetadata -from google.cloud.spanner_admin_database_v1.types.backup import CreateBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import DeleteBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import FullBackupSpec -from google.cloud.spanner_admin_database_v1.types.backup import GetBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup import IncrementalBackupSpec -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsRequest -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupOperationsResponse -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsRequest -from google.cloud.spanner_admin_database_v1.types.backup import ListBackupsResponse -from google.cloud.spanner_admin_database_v1.types.backup import UpdateBackupRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupSchedule -from google.cloud.spanner_admin_database_v1.types.backup_schedule import BackupScheduleSpec -from google.cloud.spanner_admin_database_v1.types.backup_schedule import CreateBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import CrontabSpec -from google.cloud.spanner_admin_database_v1.types.backup_schedule import DeleteBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import GetBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesRequest -from google.cloud.spanner_admin_database_v1.types.backup_schedule import ListBackupSchedulesResponse -from google.cloud.spanner_admin_database_v1.types.backup_schedule import UpdateBackupScheduleRequest -from google.cloud.spanner_admin_database_v1.types.common import EncryptionConfig -from google.cloud.spanner_admin_database_v1.types.common import EncryptionInfo -from google.cloud.spanner_admin_database_v1.types.common import OperationProgress -from google.cloud.spanner_admin_database_v1.types.common import DatabaseDialect -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import AddSplitPointsResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import CreateDatabaseRequest -from 
google.cloud.spanner_admin_database_v1.types.spanner_database_admin import Database -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DatabaseRole -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DdlStatementActionInfo -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import DropDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseDdlResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import GetDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import InternalUpdateGraphOperationResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseOperationsResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabaseRolesResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import ListDatabasesResponse -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import OptimizeRestoredDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseEncryptionConfig -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreInfo -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import SplitPoints -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseDdlRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseMetadata -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import UpdateDatabaseRequest -from google.cloud.spanner_admin_database_v1.types.spanner_database_admin import RestoreSourceType - -__all__ = ('DatabaseAdminClient', - 'DatabaseAdminAsyncClient', - 'Backup', - 'BackupInfo', - 'BackupInstancePartition', - 'CopyBackupEncryptionConfig', - 'CopyBackupMetadata', - 'CopyBackupRequest', - 'CreateBackupEncryptionConfig', - 'CreateBackupMetadata', - 'CreateBackupRequest', - 'DeleteBackupRequest', - 'FullBackupSpec', - 'GetBackupRequest', - 'IncrementalBackupSpec', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'UpdateBackupRequest', - 'BackupSchedule', - 'BackupScheduleSpec', - 'CreateBackupScheduleRequest', - 'CrontabSpec', - 'DeleteBackupScheduleRequest', - 'GetBackupScheduleRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'UpdateBackupScheduleRequest', - 'EncryptionConfig', - 'EncryptionInfo', - 'OperationProgress', - 
'DatabaseDialect', - 'AddSplitPointsRequest', - 'AddSplitPointsResponse', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'Database', - 'DatabaseRole', - 'DdlStatementActionInfo', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'GetDatabaseRequest', - 'InternalUpdateGraphOperationRequest', - 'InternalUpdateGraphOperationResponse', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'ListDatabaseRolesRequest', - 'ListDatabaseRolesResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'OptimizeRestoredDatabaseMetadata', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'RestoreDatabaseRequest', - 'RestoreInfo', - 'SplitPoints', - 'UpdateDatabaseDdlMetadata', - 'UpdateDatabaseDdlRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseRequest', - 'RestoreSourceType', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed deleted file mode 100644 index 29f334aad6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py deleted file mode 100644 index 6605eff74d..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/__init__.py +++ /dev/null @@ -1,150 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner_admin_database_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.database_admin import DatabaseAdminClient -from .services.database_admin import DatabaseAdminAsyncClient - -from .types.backup import Backup -from .types.backup import BackupInfo -from .types.backup import BackupInstancePartition -from .types.backup import CopyBackupEncryptionConfig -from .types.backup import CopyBackupMetadata -from .types.backup import CopyBackupRequest -from .types.backup import CreateBackupEncryptionConfig -from .types.backup import CreateBackupMetadata -from .types.backup import CreateBackupRequest -from .types.backup import DeleteBackupRequest -from .types.backup import FullBackupSpec -from .types.backup import GetBackupRequest -from .types.backup import IncrementalBackupSpec -from .types.backup import ListBackupOperationsRequest -from .types.backup import ListBackupOperationsResponse -from .types.backup import ListBackupsRequest -from .types.backup import ListBackupsResponse -from .types.backup import UpdateBackupRequest -from .types.backup_schedule import BackupSchedule -from .types.backup_schedule import BackupScheduleSpec -from .types.backup_schedule import CreateBackupScheduleRequest -from .types.backup_schedule import CrontabSpec -from .types.backup_schedule import DeleteBackupScheduleRequest -from .types.backup_schedule import GetBackupScheduleRequest -from .types.backup_schedule import ListBackupSchedulesRequest -from .types.backup_schedule import ListBackupSchedulesResponse -from .types.backup_schedule import UpdateBackupScheduleRequest -from .types.common import EncryptionConfig -from .types.common import EncryptionInfo -from .types.common import OperationProgress -from .types.common import DatabaseDialect -from .types.spanner_database_admin import AddSplitPointsRequest -from .types.spanner_database_admin import AddSplitPointsResponse -from .types.spanner_database_admin import CreateDatabaseMetadata -from .types.spanner_database_admin import CreateDatabaseRequest -from .types.spanner_database_admin import Database -from .types.spanner_database_admin import DatabaseRole -from .types.spanner_database_admin import DdlStatementActionInfo -from .types.spanner_database_admin import DropDatabaseRequest -from .types.spanner_database_admin import GetDatabaseDdlRequest -from .types.spanner_database_admin import GetDatabaseDdlResponse -from .types.spanner_database_admin import GetDatabaseRequest -from .types.spanner_database_admin import InternalUpdateGraphOperationRequest -from .types.spanner_database_admin import InternalUpdateGraphOperationResponse -from .types.spanner_database_admin import ListDatabaseOperationsRequest -from .types.spanner_database_admin import ListDatabaseOperationsResponse -from .types.spanner_database_admin import ListDatabaseRolesRequest -from .types.spanner_database_admin import ListDatabaseRolesResponse -from .types.spanner_database_admin import ListDatabasesRequest -from .types.spanner_database_admin import ListDatabasesResponse -from .types.spanner_database_admin import OptimizeRestoredDatabaseMetadata -from .types.spanner_database_admin import RestoreDatabaseEncryptionConfig -from .types.spanner_database_admin import RestoreDatabaseMetadata -from .types.spanner_database_admin import RestoreDatabaseRequest -from .types.spanner_database_admin import RestoreInfo -from .types.spanner_database_admin import SplitPoints -from .types.spanner_database_admin import UpdateDatabaseDdlMetadata -from 
.types.spanner_database_admin import UpdateDatabaseDdlRequest -from .types.spanner_database_admin import UpdateDatabaseMetadata -from .types.spanner_database_admin import UpdateDatabaseRequest -from .types.spanner_database_admin import RestoreSourceType - -__all__ = ( - 'DatabaseAdminAsyncClient', -'AddSplitPointsRequest', -'AddSplitPointsResponse', -'Backup', -'BackupInfo', -'BackupInstancePartition', -'BackupSchedule', -'BackupScheduleSpec', -'CopyBackupEncryptionConfig', -'CopyBackupMetadata', -'CopyBackupRequest', -'CreateBackupEncryptionConfig', -'CreateBackupMetadata', -'CreateBackupRequest', -'CreateBackupScheduleRequest', -'CreateDatabaseMetadata', -'CreateDatabaseRequest', -'CrontabSpec', -'Database', -'DatabaseAdminClient', -'DatabaseDialect', -'DatabaseRole', -'DdlStatementActionInfo', -'DeleteBackupRequest', -'DeleteBackupScheduleRequest', -'DropDatabaseRequest', -'EncryptionConfig', -'EncryptionInfo', -'FullBackupSpec', -'GetBackupRequest', -'GetBackupScheduleRequest', -'GetDatabaseDdlRequest', -'GetDatabaseDdlResponse', -'GetDatabaseRequest', -'IncrementalBackupSpec', -'InternalUpdateGraphOperationRequest', -'InternalUpdateGraphOperationResponse', -'ListBackupOperationsRequest', -'ListBackupOperationsResponse', -'ListBackupSchedulesRequest', -'ListBackupSchedulesResponse', -'ListBackupsRequest', -'ListBackupsResponse', -'ListDatabaseOperationsRequest', -'ListDatabaseOperationsResponse', -'ListDatabaseRolesRequest', -'ListDatabaseRolesResponse', -'ListDatabasesRequest', -'ListDatabasesResponse', -'OperationProgress', -'OptimizeRestoredDatabaseMetadata', -'RestoreDatabaseEncryptionConfig', -'RestoreDatabaseMetadata', -'RestoreDatabaseRequest', -'RestoreInfo', -'RestoreSourceType', -'SplitPoints', -'UpdateBackupRequest', -'UpdateBackupScheduleRequest', -'UpdateDatabaseDdlMetadata', -'UpdateDatabaseDdlRequest', -'UpdateDatabaseMetadata', -'UpdateDatabaseRequest', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json deleted file mode 100644 index 027a4f612b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_metadata.json +++ /dev/null @@ -1,433 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.spanner_admin_database_v1", - "protoPackage": "google.spanner.admin.database.v1", - "schema": "1.0", - "services": { - "DatabaseAdmin": { - "clients": { - "grpc": { - "libraryClient": "DatabaseAdminClient", - "rpcs": { - "AddSplitPoints": { - "methods": [ - "add_split_points" - ] - }, - "CopyBackup": { - "methods": [ - "copy_backup" - ] - }, - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DropDatabase": { - "methods": [ - "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - 
"InternalUpdateGraphOperation": { - "methods": [ - "internal_update_graph_operation" - ] - }, - "ListBackupOperations": { - "methods": [ - "list_backup_operations" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabaseRoles": { - "methods": [ - "list_database_roles" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DatabaseAdminAsyncClient", - "rpcs": { - "AddSplitPoints": { - "methods": [ - "add_split_points" - ] - }, - "CopyBackup": { - "methods": [ - "copy_backup" - ] - }, - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DropDatabase": { - "methods": [ - "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "InternalUpdateGraphOperation": { - "methods": [ - "internal_update_graph_operation" - ] - }, - "ListBackupOperations": { - "methods": [ - "list_backup_operations" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabaseRoles": { - "methods": [ - "list_database_roles" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - }, - "rest": { - "libraryClient": "DatabaseAdminClient", - "rpcs": { - "AddSplitPoints": { - "methods": [ - "add_split_points" - ] - }, - "CopyBackup": { - "methods": [ - "copy_backup" - ] - }, - "CreateBackup": { - "methods": [ - "create_backup" - ] - }, - "CreateBackupSchedule": { - "methods": [ - "create_backup_schedule" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupSchedule": { - "methods": [ - "delete_backup_schedule" - ] - }, - "DropDatabase": { - "methods": [ 
- "drop_database" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupSchedule": { - "methods": [ - "get_backup_schedule" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetDatabaseDdl": { - "methods": [ - "get_database_ddl" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "InternalUpdateGraphOperation": { - "methods": [ - "internal_update_graph_operation" - ] - }, - "ListBackupOperations": { - "methods": [ - "list_backup_operations" - ] - }, - "ListBackupSchedules": { - "methods": [ - "list_backup_schedules" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDatabaseOperations": { - "methods": [ - "list_database_operations" - ] - }, - "ListDatabaseRoles": { - "methods": [ - "list_database_roles" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "RestoreDatabase": { - "methods": [ - "restore_database" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupSchedule": { - "methods": [ - "update_backup_schedule" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateDatabaseDdl": { - "methods": [ - "update_database_ddl" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed deleted file mode 100644 index 29f334aad6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-database package uses inline types. diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py deleted file mode 100644 index 85e225bbd8..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DatabaseAdminClient -from .async_client import DatabaseAdminAsyncClient - -__all__ = ( - 'DatabaseAdminClient', - 'DatabaseAdminAsyncClient', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py deleted file mode 100644 index b5e733e3a6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/async_client.py +++ /dev/null @@ -1,3968 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import logging as std_logging
-from collections import OrderedDict
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
-import uuid
-
-from google.cloud.spanner_admin_database_v1 import gapic_version as package_version
-
-from google.api_core.client_options import ClientOptions
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials   # type: ignore
-from google.oauth2 import service_account              # type: ignore
-import google.protobuf
-
-
-try:
-    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
-
-from google.api_core import operation  # type: ignore
-from google.api_core import operation_async  # type: ignore
-from google.cloud.spanner_admin_database_v1.services.database_admin import pagers
-from google.cloud.spanner_admin_database_v1.types import backup
-from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
-from google.cloud.spanner_admin_database_v1.types import backup_schedule
-from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule
-from google.cloud.spanner_admin_database_v1.types import common
-from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import duration_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport
-from .client import DatabaseAdminClient
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-class DatabaseAdminAsyncClient:
-    """Cloud Spanner Database Admin API
-
-    The Cloud Spanner Database Admin API can be used to:
-
-    - create, drop, and list databases
-    - update the schema of pre-existing databases
-    - create, delete, copy and list backups for a database
-    - restore a database from an existing backup
-    """
-
-    _client: DatabaseAdminClient
-
-    # Copy defaults from the synchronous client for use here.
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
- DEFAULT_ENDPOINT = DatabaseAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DatabaseAdminClient._DEFAULT_UNIVERSE - - backup_path = staticmethod(DatabaseAdminClient.backup_path) - parse_backup_path = staticmethod(DatabaseAdminClient.parse_backup_path) - backup_schedule_path = staticmethod(DatabaseAdminClient.backup_schedule_path) - parse_backup_schedule_path = staticmethod(DatabaseAdminClient.parse_backup_schedule_path) - crypto_key_path = staticmethod(DatabaseAdminClient.crypto_key_path) - parse_crypto_key_path = staticmethod(DatabaseAdminClient.parse_crypto_key_path) - crypto_key_version_path = staticmethod(DatabaseAdminClient.crypto_key_version_path) - parse_crypto_key_version_path = staticmethod(DatabaseAdminClient.parse_crypto_key_version_path) - database_path = staticmethod(DatabaseAdminClient.database_path) - parse_database_path = staticmethod(DatabaseAdminClient.parse_database_path) - database_role_path = staticmethod(DatabaseAdminClient.database_role_path) - parse_database_role_path = staticmethod(DatabaseAdminClient.parse_database_role_path) - instance_path = staticmethod(DatabaseAdminClient.instance_path) - parse_instance_path = staticmethod(DatabaseAdminClient.parse_instance_path) - instance_partition_path = staticmethod(DatabaseAdminClient.instance_partition_path) - parse_instance_partition_path = staticmethod(DatabaseAdminClient.parse_instance_partition_path) - common_billing_account_path = staticmethod(DatabaseAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DatabaseAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DatabaseAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(DatabaseAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(DatabaseAdminClient.common_organization_path) - parse_common_organization_path = staticmethod(DatabaseAdminClient.parse_common_organization_path) - common_project_path = staticmethod(DatabaseAdminClient.common_project_path) - parse_common_project_path = staticmethod(DatabaseAdminClient.parse_common_project_path) - common_location_path = staticmethod(DatabaseAdminClient.common_location_path) - parse_common_location_path = staticmethod(DatabaseAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminAsyncClient: The constructed client. - """ - return DatabaseAdminClient.from_service_account_info.__func__(DatabaseAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminAsyncClient: The constructed client. 
- """ - return DatabaseAdminClient.from_service_account_file.__func__(DatabaseAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DatabaseAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DatabaseAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatabaseAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = DatabaseAdminClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the database admin async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DatabaseAdminTransport constructor. - If set to None, a transport is chosen automatically. 
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = DatabaseAdminClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.spanner.admin.database_v1.DatabaseAdminAsyncClient`.",
-                extra={
-                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
-                    "credentialsType": None,
-                }
-            )
-
-    async def list_databases(self,
-            request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListDatabasesAsyncPager:
-        r"""Lists Cloud Spanner databases.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            async def sample_list_databases():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_database_v1.ListDatabasesRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_databases(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]]):
-                The request object. The request for
-                [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases].
-            parent (:class:`str`):
-                Required. The instance whose databases should be listed.
-                Values are of the form
-                ``projects/<project>/instances/<instance>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager:
-                The response for
-                [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_database_admin.ListDatabasesRequest):
-            request = spanner_database_admin.ListDatabasesRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_databases]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListDatabasesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_database(self,
-            request: Optional[Union[spanner_database_admin.CreateDatabaseRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            create_statement: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates a new Cloud Spanner database and starts to prepare it
-        for serving. The returned [long-running
-        operation][google.longrunning.Operation] will have a name of the
-        format ``<database_name>/operations/<operation_id>`` and can be
-        used to track preparation of the database. The
-        [metadata][google.longrunning.Operation.metadata] field type is
-        [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata].
-        The [response][google.longrunning.Operation.response] field type
-        is [Database][google.spanner.admin.database.v1.Database], if
-        successful.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            async def sample_create_database():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_database_v1.CreateDatabaseRequest(
-                    parent="parent_value",
-                    create_statement="create_statement_value",
-                )
-
-                # Make the request
-                operation = client.create_database(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]]):
-                The request object. The request for
-                [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase].
-            parent (:class:`str`):
-                Required. The name of the instance that will serve the
-                new database. Values are of the form
-                ``projects/<project>/instances/<instance>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            create_statement (:class:`str`):
-                Required. A ``CREATE DATABASE`` statement, which
-                specifies the ID of the new database. The database ID
-                must conform to the regular expression
-                ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30
-                characters in length. If the database ID is a reserved
-                word or if it contains a hyphen, the database ID must be
-                enclosed in backticks (:literal:`\``).
-
-                This corresponds to the ``create_statement`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, create_statement] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.CreateDatabaseRequest): - request = spanner_database_admin.CreateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if create_statement is not None: - request.create_statement = create_statement - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.CreateDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def get_database(self, - request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.Database: - r"""Gets the state of a Cloud Spanner database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            async def sample_get_database():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_database_v1.GetDatabaseRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_database(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]]):
-                The request object. The request for
-                [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase].
-            name (:class:`str`):
-                Required. The name of the requested database. Values are
-                of the form
-                ``projects/<project>/instances/<instance>/databases/<database>``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_database_v1.types.Database:
-                A Cloud Spanner database.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_database_admin.GetDatabaseRequest):
-            request = spanner_database_admin.GetDatabaseRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_database]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_database(self,
-            request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None,
-            *,
-            database: Optional[spanner_database_admin.Database] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Updates a Cloud Spanner database. The returned [long-running
-        operation][google.longrunning.Operation] can be used to track
-        the progress of updating the database. If the named database
-        does not exist, returns ``NOT_FOUND``.
-
-        While the operation is pending:
-
-        -  The database's
-           [reconciling][google.spanner.admin.database.v1.Database.reconciling]
-           field is set to true.
-        -  Cancelling the operation is best-effort. If the cancellation
-           succeeds, the operation metadata's
-           [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time]
-           is set, the updates are reverted, and the operation terminates
-           with a ``CANCELLED`` status.
-        -  New UpdateDatabase requests will return a
-           ``FAILED_PRECONDITION`` error until the pending operation is
-           done (returns successfully or with error).
-        -  Reading the database via the API continues to give the
-           pre-request values.
-
-        Upon completion of the returned operation:
-
-        -  The new values are in effect and readable via the API.
-        -  The database's
-           [reconciling][google.spanner.admin.database.v1.Database.reconciling]
-           field becomes false.
-
-        The returned [long-running
-        operation][google.longrunning.Operation] will have a name of the
-        format
-        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``
-        and can be used to track the database modification. The
-        [metadata][google.longrunning.Operation.metadata] field type is
-        [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata].
-        The [response][google.longrunning.Operation.response] field type
-        is [Database][google.spanner.admin.database.v1.Database], if
-        successful.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            async def sample_update_database():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-                # Initialize request argument(s)
-                database = spanner_admin_database_v1.Database()
-                database.name = "name_value"
-
-                request = spanner_admin_database_v1.UpdateDatabaseRequest(
-                    database=database,
-                )
-
-                # Make the request
-                operation = client.update_database(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]]):
-                The request object. The request for
-                [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase].
-            database (:class:`google.cloud.spanner_admin_database_v1.types.Database`):
-                Required. The database to update. The ``name`` field of
The ``name`` field of - the database is of the form - ``projects//instances//databases/``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to update. Currently, only - ``enable_drop_protection`` field can be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest): - request = spanner_database_admin.UpdateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.UpdateDatabaseMetadata, - ) - - # Done; return the response. 
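Since ``update_mask`` currently supports only ``enable_drop_protection``, a typical update builds a one-path mask and awaits the long-running operation; a sketch, with ``db_name`` as a hypothetical full resource path:

.. code-block:: python

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import field_mask_pb2

    async def enable_drop_protection(client, db_name):
        # Only the fields named in the mask are read from ``database``.
        database = spanner_admin_database_v1.Database(
            name=db_name,
            enable_drop_protection=True,
        )
        operation = await client.update_database(
            database=database,
            update_mask=field_mask_pb2.FieldMask(paths=["enable_drop_protection"]),
        )
        # Resolves to the updated Database once reconciling finishes.
        return await operation.result()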
- return response - - async def update_database_ddl(self, - request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None, - *, - database: Optional[str] = None, - statements: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``<database_name>/operations/<operation_id>`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]]): - The request object. Enqueues the given DDL statements to be applied, in - order but not necessarily all at once, to the database - schema at some point (or points) in the future. The - server checks that the statements are executable - (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - ``NULL`` value in a column to which ``NOT NULL`` would - be added). If a statement fails, all subsequent - statements in the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - database (:class:`str`): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - statements (:class:`MutableSequence[str]`): - Required. DDL statements to be - applied to the database. - - This corresponds to the ``statements`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, statements] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): - request = spanner_database_admin.UpdateDatabaseDdlRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if statements: - request.statements.extend(statements) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, - ) - - # Done; return the response. - return response - - async def drop_database(self, - request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
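Because the DDL operation's response type is ``Empty``, callers await the operation only to confirm the enqueued statements finished (or to surface the failure); a sketch with hypothetical identifiers:

.. code-block:: python

    async def add_nickname_column(client, db_name):
        # db_name is a hypothetical full database resource path.
        operation = await client.update_database_ddl(
            database=db_name,
            statements=["ALTER TABLE Singers ADD COLUMN Nickname STRING(MAX)"],
        )
        # The result is google.protobuf.Empty; a failed statement
        # surfaces here as an exception instead.
        await operation.result()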
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_drop_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DropDatabaseRequest( - database="database_value", - ) - - # Make the request - await client.drop_database(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]]): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - database (:class:`str`): - Required. The database to be dropped. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.DropDatabaseRequest): - request = spanner_database_admin.DropDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.drop_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_database_ddl(self, - request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - r"""Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. 
This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_get_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseDdlRequest( - database="database_value", - ) - - # Make the request - response = await client.get_database_ddl(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]]): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - database (:class:`str`): - Required. The database whose schema we wish to get. - Values are of the form - ``projects//instances//databases/`` - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): - request = spanner_database_admin.GetDatabaseDdlRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
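The ``GetDatabaseDdlResponse`` returned above carries the schema as one formatted DDL string per statement; a sketch:

.. code-block:: python

    async def print_schema(client, db_name):
        # db_name is a hypothetical full database resource path.
        response = await client.get_database_ddl(database=db_name)
        # response.statements is a repeated string field; pending
        # schema changes are not reflected here.
        for ddl in response.statements:
            print(ddl)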
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` - - **YAML example:** - - ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` - - **YAML example:** - - ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - permissions: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`MutableSequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource, permissions] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here.
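Because the response echoes back only the permissions the caller actually holds, a subset test is the usual pattern; a sketch (the permission names are illustrative):

.. code-block:: python

    async def has_permissions(client, resource_name, wanted):
        # resource_name is a hypothetical database or backup path;
        # wanted is a list such as ["spanner.backups.get"].
        response = await client.test_iam_permissions(
            resource=resource_name,
            permissions=wanted,
        )
        return set(wanted) <= set(response.permissions)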
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_backup(self, - request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup: Optional[gsad_backup.Backup] = None, - backup_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_create_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - ) - - # Make the request - operation = client.create_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]]): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - parent (:class:`str`): - Required. The name of the instance in which the backup - will be created. This must be the same instance that - contains the database the backup will be created from. - The backup will be stored in the location(s) specified - in the instance configuration of this instance. Values - are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): - Required. The backup to create. - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (:class:`str`): - Required. The id of the backup to be created. 
The - ``backup_id`` appended to ``parent`` forms the full - backup name of the form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup, backup_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup.CreateBackupRequest): - request = gsad_backup.CreateBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup is not None: - request.backup = backup - if backup_id is not None: - request.backup_id = backup_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gsad_backup.Backup, - metadata_type=gsad_backup.CreateBackupMetadata, - ) - - # Done; return the response. - return response - - async def copy_backup(self, - request: Optional[Union[backup.CopyBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_id: Optional[str] = None, - source_backup: Optional[str] = None, - expire_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts copying a Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track copying of the backup. 
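A backup needs at least ``database`` and ``expire_time``; proto-plus accepts a ``datetime`` wherever the proto declares a ``Timestamp``. A sketch of the create-and-wait pattern with hypothetical names:

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from google.cloud import spanner_admin_database_v1

    async def back_up(client, instance_name, db_name):
        backup = spanner_admin_database_v1.Backup(
            database=db_name,
            expire_time=datetime.now(timezone.utc) + timedelta(days=7),
        )
        operation = await client.create_backup(
            parent=instance_name,
            backup_id="my-backup",  # hypothetical id
            backup=backup,
        )
        # Cancelling the operation would delete the partial backup;
        # here we simply wait for it to finish.
        return await operation.result()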
The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_copy_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CopyBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - ) - - # Make the request - operation = client.copy_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]]): - The request object. The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - parent (:class:`str`): - Required. The name of the destination instance that will - contain the backup copy. Values are of the form: - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (:class:`str`): - Required. The id of the backup copy. The ``backup_id`` - appended to ``parent`` forms the full backup_uri of the - form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source_backup (:class:`str`): - Required. The source backup to be copied. The source - backup needs to be in READY state for it to be copied. - Once CopyBackup is in progress, the source backup cannot - be deleted or cleaned up on expiration until CopyBackup - is finished. Values are of the form: - ``projects//instances//backups/``. - - This corresponds to the ``source_backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - expire_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Required. The expiration time of the backup in - microsecond granularity. The expiration time must be at - least 6 hours and at most 366 days from the - ``create_time`` of the source backup. Once the - ``expire_time`` has passed, the backup is eligible to be - automatically deleted by Cloud Spanner to free the - resources used by the backup. - - This corresponds to the ``expire_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_id, source_backup, expire_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.CopyBackupRequest): - request = backup.CopyBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_id is not None: - request.backup_id = backup_id - if source_backup is not None: - request.source_backup = source_backup - if expire_time is not None: - request.expire_time = expire_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.copy_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backup.Backup, - metadata_type=backup.CopyBackupMetadata, - ) - - # Done; return the response. - return response - - async def get_backup(self, - request: Optional[Union[backup.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup.Backup: - r"""Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
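Because a copy's ``expire_time`` is bounded relative to the source backup's ``create_time``, one workable pattern derives it from the fetched source; a sketch with hypothetical names:

.. code-block:: python

    from datetime import timedelta

    async def copy_to(client, dest_instance, source_backup_name):
        source = await client.get_backup(name=source_backup_name)
        operation = await client.copy_backup(
            parent=dest_instance,
            backup_id="my-backup-copy",  # hypothetical id
            source_backup=source_backup_name,
            # 30 days past the source's creation, inside the
            # 6-hour-to-366-day window required above.
            expire_time=source.create_time + timedelta(days=30),
        )
        return await operation.result()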
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_get_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]]): - The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - name (:class:`str`): - Required. Name of the backup. Values are of the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.GetBackupRequest): - request = backup.GetBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def update_backup(self, - request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None, - *, - backup: Optional[gsad_backup.Backup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup.Backup: - r"""Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_update_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupRequest( - ) - - # Make the request - response = await client.update_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]]): - The request object. The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - backup (:class:`google.cloud.spanner_admin_database_v1.types.Backup`): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only - supported for the following fields: - - - ``backup.expire_time``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be - updated. This mask is relative to the Backup resource, - not to the request message. The field mask must always - be specified; this prevents any future fields from being - erased accidentally by clients that do not know about - them. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Backup: - A backup of a Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [backup, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup.UpdateBackupRequest): - request = gsad_backup.UpdateBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup(self, - request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_delete_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]]): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - name (:class:`str`): - Required. Name of the backup to delete. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
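Only ``expire_time`` may change on an existing backup, and the mask paths are relative to the ``Backup`` resource rather than to the request message; a sketch:

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import field_mask_pb2

    async def extend_backup(client, backup_name):
        backup = spanner_admin_database_v1.Backup(
            name=backup_name,  # hypothetical full backup path
            expire_time=datetime.now(timezone.utc) + timedelta(days=30),
        )
        return await client.update_backup(
            backup=backup,
            # Relative to Backup, hence no "backup." prefix.
            update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
        )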
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.DeleteBackupRequest): - request = backup.DeleteBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_backups(self, - request: Optional[Union[backup.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupsAsyncPager: - r"""Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]]): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - parent (:class:`str`): - Required. The instance to list backups from. Values are - of the form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.ListBackupsRequest): - request = backup.ListBackupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def restore_database(self, - request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database_id: Optional[str] = None, - backup: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful.
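The ``ListBackupsAsyncPager`` assembled above can be consumed item-by-item or page-by-page. A short sketch (not part of the patch; the instance name is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1

    async def show_backups():
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
        parent = "projects/my-project/instances/my-instance"  # placeholder

        # Item-at-a-time iteration; the pager fetches further pages lazily.
        pager = await client.list_backups(parent=parent)
        async for backup in pager:
            print(backup.name, backup.state)

        # Or walk whole pages when you need the raw ListBackupsResponse fields.
        pager = await client.list_backups(parent=parent)
        async for page in pager.pages:
            print(len(page.backups), "backups on this page")

    asyncio.run(show_backups())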
Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_restore_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.RestoreDatabaseRequest( - backup="backup_value", - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]]): - The request object. The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - parent (:class:`str`): - Required. The name of the instance in which to create - the restored database. This instance must be in the same - project and have the same instance configuration as the - instance containing the source backup. Values are of the - form ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (:class:`str`): - Required. The id of the database to create and restore - to. This database must not already exist. The - ``database_id`` appended to ``parent`` forms the full - database name of the form - ``projects/<project>/instances/<instance>/databases/<database_id>``. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (:class:`str`): - Name of the backup from which to restore. Values are of - the form - ``projects/<project>/instances/<instance>/backups/<backup>``. - - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Database` - A Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
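Since ``restore_database`` returns a long-running operation, callers await the call to obtain an ``AsyncOperation`` and then await its result. A hedged sketch with placeholder resource names:

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1

    async def restore(parent: str, database_id: str, backup: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        # Awaiting the call returns an AsyncOperation, not the database itself.
        operation = await client.restore_database(
            parent=parent, database_id=database_id, backup=backup,
        )

        # Block (asynchronously) until the restore finishes; a generous timeout
        # is sensible because restores can run for a long time.
        database = await operation.result(timeout=3600)
        print("Restored:", database.name)

        # `await operation.cancel()` would instead abort the restore and
        # delete the partially restored database, per the docstring above.

    asyncio.run(restore(
        "projects/my-project/instances/my-instance",  # placeholders
        "restored-db",
        "projects/my-project/instances/my-instance/backups/my-backup",
    ))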
- flattened_params = [parent, database_id, backup] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest): - request = spanner_database_admin.RestoreDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database_id is not None: - request.database_id = database_id - if backup is not None: - request.backup = backup - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.restore_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_database_admin.Database, - metadata_type=spanner_database_admin.RestoreDatabaseMetadata, - ) - - # Done; return the response. - return response - - async def list_database_operations(self, - request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabaseOperationsAsyncPager: - r"""Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_database_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]]): - The request object. The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - parent (:class:`str`): - Required. The instance of the database operations. - Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager: - The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest): - request = spanner_database_admin.ListDatabaseOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request.
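In practice ``list_database_operations`` is usually combined with the request's ``filter`` field to select a single metadata type; the expression below follows the filter examples in the proto comments and is illustrative only (parent is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1

    async def pending_restores(parent: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        # Keep only restore operations that have not finished yet; the exact
        # filter grammar is documented with the request type.
        request = spanner_admin_database_v1.ListDatabaseOperationsRequest(
            parent=parent,
            filter=(
                "(metadata.@type=type.googleapis.com/"
                "google.spanner.admin.database.v1.RestoreDatabaseMetadata) "
                "AND (done:false)"
            ),
        )
        async for op in await client.list_database_operations(request=request):
            print(op.name)

    asyncio.run(pending_restores("projects/my-project/instances/my-instance"))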
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatabaseOperationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backup_operations(self, - request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupOperationsAsyncPager: - r"""Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation>``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_backup_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]]): - The request object. The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - parent (:class:`str`): - Required. The instance of the backup operations. Values - are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager: - The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.ListBackupOperationsRequest): - request = backup.ListBackupOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupOperationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_database_roles(self, - request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabaseRolesAsyncPager: - r"""Lists Cloud Spanner database roles. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_database_roles(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseRolesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_roles(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]]): - The request object. The request for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - parent (:class:`str`): - Required. The database whose roles should be listed. 
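The items yielded by ``ListBackupOperationsAsyncPager`` are raw ``google.longrunning`` ``Operation`` messages, so progress metadata must be unpacked from the ``Any`` payload. A sketch assuming every returned operation is a ``CreateBackup`` operation (a filter would normally guarantee that; the parent is a placeholder):

.. code-block:: python

    import asyncio

    from google.api_core import protobuf_helpers
    from google.cloud import spanner_admin_database_v1

    async def backup_progress(parent: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        async for op in await client.list_backup_operations(parent=parent):
            # Each item is a raw longrunning Operation; its metadata is packed
            # in an Any and must be unpacked to read progress details.
            metadata = protobuf_helpers.from_any_pb(
                spanner_admin_database_v1.CreateBackupMetadata, op.metadata
            )
            print(op.name, metadata.progress.progress_percent)

    asyncio.run(backup_progress("projects/my-project/instances/my-instance"))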
- Values are of the form - ``projects/<project>/instances/<instance>/databases/<database>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager: - The response for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest): - request = spanner_database_admin.ListDatabaseRolesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_database_roles] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatabaseRolesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def add_split_points(self, - request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None, - *, - database: Optional[str] = None, - split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.AddSplitPointsResponse: - r"""Adds split points to specified tables and indexes of a - database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_add_split_points(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.AddSplitPointsRequest( - database="database_value", - ) - - # Make the request - response = await client.add_split_points(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]]): - The request object. The request for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - database (:class:`str`): - Required. The database on whose tables/indexes split - points are to be added. Values are of the form - ``projects/<project>/instances/<instance>/databases/<database>``. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - split_points (:class:`MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]`): - Required. The split points to add. - This corresponds to the ``split_points`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: - The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, split_points] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): - request = spanner_database_admin.AddSplitPointsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if split_points: - request.split_points.extend(split_points) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.add_split_points] - - # Certain fields should be provided within the metadata header; - # add these here.
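A ``SplitPoints`` message names a table (or index) plus the key at which to pre-split. A hedged construction sketch; the table name, key value, and database name are hypothetical:

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import struct_pb2

    async def presplit(database: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        # A split point: the table to split and the primary-key value at which
        # to split it, expressed as a ListValue of key parts.
        split = spanner_admin_database_v1.SplitPoints(
            table="Singers",
            keys=[
                spanner_admin_database_v1.SplitPoints.Key(
                    key_parts=struct_pb2.ListValue(
                        values=[struct_pb2.Value(string_value="42")]
                    )
                )
            ],
        )
        response = await client.add_split_points(
            database=database, split_points=[split]
        )
        print(response)

    asyncio.run(presplit(
        "projects/my-project/instances/my-instance/databases/my-database"
    ))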
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - backup_schedule_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Creates a new backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_create_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateBackupScheduleRequest( - parent="parent_value", - backup_schedule_id="backup_schedule_id_value", - ) - - # Make the request - response = await client.create_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]]): - The request object. The request for - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. - parent (:class:`str`): - Required. The name of the database - that this backup schedule applies to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): - Required. The backup schedule to - create. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_schedule_id (:class:`str`): - Required. The Id to use for the backup schedule. The - ``backup_schedule_id`` appended to ``parent`` forms the - full backup schedule name of the form - ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``. - - This corresponds to the ``backup_schedule_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`.
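For context, a hedged sketch of building the ``BackupSchedule`` payload for ``create_backup_schedule`` with a crontab spec and a retention window; all identifiers are placeholders and the field shapes follow the ``backup_schedule`` proto types exported by this package:

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import duration_pb2

    async def nightly_schedule(database: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        schedule = spanner_admin_database_v1.BackupSchedule(
            # Run at 02:00 every day; CrontabSpec.text holds the cron line.
            spec=spanner_admin_database_v1.BackupScheduleSpec(
                cron_spec=spanner_admin_database_v1.CrontabSpec(text="0 2 * * *")
            ),
            # Keep each backup for 7 days.
            retention_duration=duration_pb2.Duration(seconds=7 * 24 * 3600),
            full_backup_spec=spanner_admin_database_v1.FullBackupSpec(),
        )
        created = await client.create_backup_schedule(
            parent=database,
            backup_schedule=schedule,
            backup_schedule_id="nightly",  # placeholder schedule id
        )
        print(created.name)

    asyncio.run(nightly_schedule(
        "projects/my-project/instances/my-instance/databases/my-database"
    ))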
- - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_schedule, backup_schedule_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest): - request = gsad_backup_schedule.CreateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if backup_schedule_id is not None: - request.backup_schedule_id = backup_schedule_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_backup_schedule(self, - request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup_schedule.BackupSchedule: - r"""Gets backup schedule for the input schedule name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_get_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetBackupScheduleRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]]): - The request object. The request for - [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. - name (:class:`str`): - Required. The name of the schedule to retrieve. 
Values - are of the form - ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.GetBackupScheduleRequest): - request = backup_schedule.GetBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, - *, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = await client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]]): - The request object. The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - backup_schedule (:class:`google.cloud.spanner_admin_database_v1.types.BackupSchedule`): - Required. The backup schedule to update. - ``backup_schedule.name``, and the fields to be updated - as specified by ``update_mask`` are required. Other - fields are ignored. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which - fields in the BackupSchedule resource - should be updated. This mask is relative - to the BackupSchedule resource, not to - the request message. The field mask must - always be specified; this prevents any - future fields from being erased - accidentally. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup_schedule, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): - request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
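As with ``update_backup`` earlier, only fields named in ``update_mask`` are written. A sketch that shortens a schedule's retention, assuming the placeholder schedule name exists:

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import duration_pb2, field_mask_pb2

    async def shorten_retention(schedule_name: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        schedule = spanner_admin_database_v1.BackupSchedule(
            name=schedule_name,
            retention_duration=duration_pb2.Duration(seconds=14 * 24 * 3600),
        )
        # Only the masked field is written; unmentioned fields keep their values.
        mask = field_mask_pb2.FieldMask(paths=["retention_duration"])

        updated = await client.update_backup_schedule(
            backup_schedule=schedule, update_mask=mask
        )
        print(updated.retention_duration)

    asyncio.run(shorten_retention(
        "projects/my-project/instances/my-instance/databases/my-database"
        "/backupSchedules/my-schedule"
    ))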
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup_schedule.name", request.backup_schedule.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup_schedule(self, - request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_delete_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DeleteBackupScheduleRequest( - name="name_value", - ) - - # Make the request - await client.delete_backup_schedule(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]]): - The request object. The request for - [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. - name (:class:`str`): - Required. The name of the schedule to delete. Values are - of the form - ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one.
- if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest): - request = backup_schedule.DeleteBackupScheduleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_backup_schedules(self, - request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupSchedulesAsyncPager: - r"""Lists all the backup schedules for the database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_list_backup_schedules(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_schedules(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]]): - The request object. The request for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - parent (:class:`str`): - Required. Database is the parent - resource whose backup schedules should - be listed. Values are of the form - projects/<project>/instances/<instance>/databases/<database> - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager: - The response for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - - Iterating over this object will yield results and - resolve additional pages automatically.
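Every method in this surface accepts per-call ``retry`` and ``timeout`` overrides. A sketch using ``google.api_core``'s async retry; the backoff numbers are illustrative, and the parent is a placeholder:

.. code-block:: python

    import asyncio

    from google.api_core import exceptions
    from google.api_core.retry import if_exception_type
    from google.api_core.retry_async import AsyncRetry
    from google.cloud import spanner_admin_database_v1

    async def list_with_custom_retry(parent: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        # Retry only transient UNAVAILABLE errors, with exponential backoff,
        # and give up entirely after 60 seconds.
        retry = AsyncRetry(
            initial=0.25,
            maximum=16.0,
            multiplier=2.0,
            timeout=60.0,
            predicate=if_exception_type(exceptions.ServiceUnavailable),
        )
        pager = await client.list_backup_schedules(
            parent=parent, retry=retry, timeout=30.0
        )
        async for schedule in pager:
            print(schedule.name)

    asyncio.run(list_with_custom_retry(
        "projects/my-project/instances/my-instance/databases/my-database"
    ))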
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.ListBackupSchedulesRequest): - request = backup_schedule.ListBackupSchedulesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_schedules] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupSchedulesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def internal_update_graph_operation(self, - request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, - *, - database: Optional[str] = None, - operation_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: - r"""This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - async def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = await client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]]): - The request object. 
Internal request proto, do not use - directly. - database (:class:`str`): - Internal field, do not use directly. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - operation_id (:class:`str`): - Internal field, do not use directly. - This corresponds to the ``operation_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: - Internal response proto, do not use - directly. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, operation_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): - request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if operation_id is not None: - request.operation_id = operation_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.internal_update_graph_operation] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
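The ``metadata`` parameter documented throughout accepts extra gRPC headers; keys ending in ``-bin`` must carry ``bytes``. A sketch with illustrative header keys and a placeholder operations collection name:

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1
    from google.longrunning import operations_pb2

    async def list_ops_with_metadata(name: str):
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        request = operations_pb2.ListOperationsRequest(name=name)
        # Plain keys carry str values; keys ending in -bin must carry bytes,
        # which gRPC base64-encodes on the wire. Both keys here are illustrative.
        extra = [
            ("x-goog-request-reason", "audit"),
            ("x-custom-trace-bin", b"\x00\x01"),
        ]
        response = await client.list_operations(request=request, metadata=extra)
        for op in response.operations:
            print(op.name)

    asyncio.run(list_ops_with_metadata(
        "projects/my-project/instances/my-instance/databases/my-database/operations"
    ))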
- Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
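For orientation, the four operations-mixin methods in this async client (``list_operations``, ``get_operation``, ``delete_operation``, ``cancel_operation``) all follow the same shape. A minimal, hedged usage sketch — the resource names are placeholders, and application-default credentials are assumed to be available:

.. code-block:: python

    import asyncio

    from google.cloud import spanner_admin_database_v1
    from google.longrunning import operations_pb2


    async def main():
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()

        # List long-running operations under a database; the "name" field is
        # also what the routing-header logic in these methods forwards to the
        # backend (placeholder resource names, not a real project).
        request = operations_pb2.ListOperationsRequest(
            name="projects/my-project/instances/my-instance/databases/my-db/operations"
        )
        response = await client.list_operations(request=request)
        for op in response.operations:
            print(op.name, op.done)


    asyncio.run(main())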
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "DatabaseAdminAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "DatabaseAdminAsyncClient", -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py deleted file mode 100644 index 38e3050e48..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/client.py +++ /dev/null @@ -1,4387 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import uuid -import warnings - -from google.cloud.spanner_admin_database_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import common 
-from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import duration_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from .transports.base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import DatabaseAdminGrpcTransport
-from .transports.grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport
-from .transports.rest import DatabaseAdminRestTransport
-
-
-class DatabaseAdminClientMeta(type):
-    """Metaclass for the DatabaseAdmin client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[DatabaseAdminTransport]]
-    _transport_registry["grpc"] = DatabaseAdminGrpcTransport
-    _transport_registry["grpc_asyncio"] = DatabaseAdminGrpcAsyncIOTransport
-    _transport_registry["rest"] = DatabaseAdminRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[DatabaseAdminTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class DatabaseAdminClient(metaclass=DatabaseAdminClientMeta):
-    """Cloud Spanner Database Admin API
-
-    The Cloud Spanner Database Admin API can be used to:
-
-    - create, drop, and list databases
-    - update the schema of pre-existing databases
-    - create, delete, copy and list backups for a database
-    - restore a database from an existing backup
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
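To make the endpoint rewriting above concrete, here is a short illustration of what ``_get_default_mtls_endpoint`` produces; the inputs are examples only, not endpoints the library requires:

.. code-block:: python

    # Illustration of the mTLS endpoint rewriting implemented above.
    assert DatabaseAdminClient._get_default_mtls_endpoint(
        "spanner.googleapis.com") == "spanner.mtls.googleapis.com"
    assert DatabaseAdminClient._get_default_mtls_endpoint(
        "spanner.sandbox.googleapis.com") == "spanner.mtls.sandbox.googleapis.com"
    # Already-mTLS endpoints and non-googleapis hosts pass through unchanged.
    assert DatabaseAdminClient._get_default_mtls_endpoint(
        "spanner.mtls.googleapis.com") == "spanner.mtls.googleapis.com"
    assert DatabaseAdminClient._get_default_mtls_endpoint(
        "example.com") == "example.com"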
- DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DatabaseAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DatabaseAdminTransport: - """Returns the transport used by the client instance. - - Returns: - DatabaseAdminTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def backup_path(project: str,instance: str,backup: str,) -> str: - """Returns a fully-qualified backup string.""" - return "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) - - @staticmethod - def parse_backup_path(path: str) -> Dict[str,str]: - """Parses a backup path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/backups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def backup_schedule_path(project: str,instance: str,database: str,schedule: str,) -> str: - """Returns a fully-qualified backup_schedule string.""" - return "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, ) - - @staticmethod - def parse_backup_schedule_path(path: str) -> Dict[str,str]: - """Parses a backup_schedule path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/databases/(?P.+?)/backupSchedules/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str: - """Returns a fully-qualified crypto_key string.""" - return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - - @staticmethod - def parse_crypto_key_path(path: str) -> Dict[str,str]: - """Parses a crypto_key path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def crypto_key_version_path(project: str,location: str,key_ring: str,crypto_key: str,crypto_key_version: 
-        """Returns a fully-qualified crypto_key_version string."""
-        return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, )
-
-    @staticmethod
-    def parse_crypto_key_version_path(path: str) -> Dict[str,str]:
-        """Parses a crypto_key_version path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)/cryptoKeyVersions/(?P<crypto_key_version>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def database_path(project: str,instance: str,database: str,) -> str:
-        """Returns a fully-qualified database string."""
-        return "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, )
-
-    @staticmethod
-    def parse_database_path(path: str) -> Dict[str,str]:
-        """Parses a database path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def database_role_path(project: str,instance: str,database: str,role: str,) -> str:
-        """Returns a fully-qualified database_role string."""
-        return "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, )
-
-    @staticmethod
-    def parse_database_role_path(path: str) -> Dict[str,str]:
-        """Parses a database_role path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/databases/(?P<database>.+?)/databaseRoles/(?P<role>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def instance_path(project: str,instance: str,) -> str:
-        """Returns a fully-qualified instance string."""
-        return "projects/{project}/instances/{instance}".format(project=project, instance=instance, )
-
-    @staticmethod
-    def parse_instance_path(path: str) -> Dict[str,str]:
-        """Parses an instance path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str:
-        """Returns a fully-qualified instance_partition string."""
-        return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, )
-
-    @staticmethod
-    def parse_instance_partition_path(path: str) -> Dict[str,str]:
-        """Parses an instance_partition path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)/instancePartitions/(?P<instance_partition>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
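A brief, hedged illustration of how these path helpers pair with their parsers; the resource IDs are placeholders:

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    DatabaseAdminClient = spanner_admin_database_v1.DatabaseAdminClient

    # Build a fully-qualified resource name from its components.
    path = DatabaseAdminClient.database_path("my-project", "my-instance", "my-db")
    # -> "projects/my-project/instances/my-instance/databases/my-db"

    # Parsing recovers the named segments captured by the regex groups.
    segments = DatabaseAdminClient.parse_database_path(path)
    assert segments == {
        "project": "my-project",
        "instance": "my-instance",
        "database": "my-db",
    }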
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. 
- universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DatabaseAdminClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. 
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DatabaseAdminTransport, Callable[..., DatabaseAdminTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the database admin client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DatabaseAdminTransport,Callable[..., DatabaseAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DatabaseAdminTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DatabaseAdminClient._read_environment_variables() - self._client_cert_source = DatabaseAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DatabaseAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DatabaseAdminTransport) - if transport_provided: - # transport is a DatabaseAdminTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DatabaseAdminTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DatabaseAdminClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DatabaseAdminTransport], Callable[..., DatabaseAdminTransport]] = ( - DatabaseAdminClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DatabaseAdminTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner.admin.database_v1.DatabaseAdminClient`.", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "credentialsType": None, - } - ) - - def list_databases(self, - request: Optional[Union[spanner_database_admin.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabasesPager: - r"""Lists Cloud Spanner databases. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_databases(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest, dict]): - The request object. The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. 
-            parent (str):
-                Required. The instance whose databases should be listed.
-                Values are of the form
-                ``projects/<project>/instances/<instance>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager:
-                The response for
-                [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_database_admin.ListDatabasesRequest):
-            request = spanner_database_admin.ListDatabasesRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_databases]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListDatabasesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def create_database(self,
-            request: Optional[Union[spanner_database_admin.CreateDatabaseRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            create_statement: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Creates a new Cloud Spanner database and starts to prepare it
-        for serving. The returned [long-running
-        operation][google.longrunning.Operation] will have a name of the
-        format ``<database_name>/operations/<operation_id>`` and can be
-        used to track preparation of the database.
-        The [metadata][google.longrunning.Operation.metadata] field type is
-        [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata].
-        The [response][google.longrunning.Operation.response] field type
-        is [Database][google.spanner.admin.database.v1.Database], if
-        successful.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            def sample_create_database():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_database_v1.CreateDatabaseRequest(
-                    parent="parent_value",
-                    create_statement="create_statement_value",
-                )
-
-                # Make the request
-                operation = client.create_database(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest, dict]):
-                The request object. The request for
-                [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase].
-            parent (str):
-                Required. The name of the instance that will serve the
-                new database. Values are of the form
-                ``projects/<project>/instances/<instance>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            create_statement (str):
-                Required. A ``CREATE DATABASE`` statement, which
-                specifies the ID of the new database. The database ID
-                must conform to the regular expression
-                ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be between 2 and 30
-                characters in length. If the database ID is a reserved
-                word or if it contains a hyphen, the database ID must be
-                enclosed in backticks (:literal:`\``).
-
-                This corresponds to the ``create_statement`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.spanner_admin_database_v1.types.Database`
-                A Cloud Spanner database.
-
-        """
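A hedged sketch of the `request`-versus-flattened-arguments check implemented just below: supplying both raises ``ValueError`` before any network call. The resource names are placeholders, and application-default credentials are assumed for client construction:

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()
    request = spanner_admin_database_v1.CreateDatabaseRequest(
        parent="projects/my-project/instances/my-instance",
        create_statement="CREATE DATABASE `my-db`",
    )
    try:
        # Mixing the request object with a flattened field is rejected locally.
        client.create_database(request=request, parent="projects/p/instances/i")
    except ValueError as exc:
        print(exc)  # "If the `request` argument is set, then none of ..."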
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, create_statement]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_database_admin.CreateDatabaseRequest):
-            request = spanner_database_admin.CreateDatabaseRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-            if create_statement is not None:
-                request.create_statement = create_statement
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_database]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            spanner_database_admin.Database,
-            metadata_type=spanner_database_admin.CreateDatabaseMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def get_database(self,
-            request: Optional[Union[spanner_database_admin.GetDatabaseRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> spanner_database_admin.Database:
-        r"""Gets the state of a Cloud Spanner database.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            def sample_get_database():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_database_v1.GetDatabaseRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_database(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest, dict]):
-                The request object. The request for
-                [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase].
-            name (str):
-                Required. The name of the requested database. Values are
-                of the form
-                ``projects/<project>/instances/<instance>/databases/<database>``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.Database: - A Cloud Spanner database. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.GetDatabaseRequest): - request = spanner_database_admin.GetDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_database(self, - request: Optional[Union[spanner_database_admin.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[spanner_database_admin.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates a Cloud Spanner database. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the database. If the named database - does not exist, returns ``NOT_FOUND``. - - While the operation is pending: - - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field is set to true. - - Cancelling the operation is best-effort. If the cancellation - succeeds, the operation metadata's - [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] - is set, the updates are reverted, and the operation terminates - with a ``CANCELLED`` status. - - New UpdateDatabase requests will return a - ``FAILED_PRECONDITION`` error until the pending operation is - done (returns successfully or with error). - - Reading the database via the API continues to give the - pre-request values. - - Upon completion of the returned operation: - - - The new values are in effect and readable via the API. - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field becomes false. 
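To make the cancellation semantics above concrete, a hedged sketch that reuses the ``client`` and ``request`` from the generated sample that follows; there is no guarantee the cancel takes effect:

.. code-block:: python

    operation = client.update_database(request=request)

    # Best-effort cancellation: if it succeeds, the operation terminates with
    # a CANCELLED status, the metadata's cancel_time is set, and the updates
    # are reverted; otherwise the update proceeds to completion.
    operation.cancel()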
-
-        The returned [long-running
-        operation][google.longrunning.Operation] will have a name of the
-        format
-        ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``
-        and can be used to track the database modification. The
-        [metadata][google.longrunning.Operation.metadata] field type is
-        [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata].
-        The [response][google.longrunning.Operation.response] field type
-        is [Database][google.spanner.admin.database.v1.Database], if
-        successful.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_database_v1
-
-            def sample_update_database():
-                # Create a client
-                client = spanner_admin_database_v1.DatabaseAdminClient()
-
-                # Initialize request argument(s)
-                database = spanner_admin_database_v1.Database()
-                database.name = "name_value"
-
-                request = spanner_admin_database_v1.UpdateDatabaseRequest(
-                    database=database,
-                )
-
-                # Make the request
-                operation = client.update_database(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest, dict]):
-                The request object. The request for
-                [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase].
-            database (google.cloud.spanner_admin_database_v1.types.Database):
-                Required. The database to update. The ``name`` field of
-                the database is of the form
-                ``projects/<project>/instances/<instance>/databases/<database>``.
-
-                This corresponds to the ``database`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                Required. The list of fields to update. Currently, only
-                ``enable_drop_protection`` field can be updated.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.spanner_admin_database_v1.types.Database`
-                A Cloud Spanner database.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [database, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_database_admin.UpdateDatabaseRequest):
-            request = spanner_database_admin.UpdateDatabaseRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if database is not None:
-                request.database = database
-            if update_mask is not None:
-                request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_database]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("database.name", request.database.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            spanner_database_admin.Database,
-            metadata_type=spanner_database_admin.UpdateDatabaseMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_database_ddl(self,
-            request: Optional[Union[spanner_database_admin.UpdateDatabaseDdlRequest, dict]] = None,
-            *,
-            database: Optional[str] = None,
-            statements: Optional[MutableSequence[str]] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Updates the schema of a Cloud Spanner database by
-        creating/altering/dropping tables, columns, indexes, etc. The
-        returned [long-running operation][google.longrunning.Operation]
-        will have a name of the format
-        ``<database_name>/operations/<operation_id>`` and can be used to
-        track execution of the schema change(s). The
-        [metadata][google.longrunning.Operation.metadata] field type is
-        [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata].
-        The operation has no response.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest, dict]): - The request object. Enqueues the given DDL statements to be applied, in - order but not necessarily all at once, to the database - schema at some point (or points) in the future. The - server checks that the statements are executable - (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - ``NULL`` value in a column to which ``NOT NULL`` would - be added). If a statement fails, all subsequent - statements in the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - database (str): - Required. The database to update. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - statements (MutableSequence[str]): - Required. DDL statements to be - applied to the database. - - This corresponds to the ``statements`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [database, statements] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.UpdateDatabaseDdlRequest): - request = spanner_database_admin.UpdateDatabaseDdlRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if statements is not None: - request.statements = statements - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=spanner_database_admin.UpdateDatabaseDdlMetadata, - ) - - # Done; return the response. - return response - - def drop_database(self, - request: Optional[Union[spanner_database_admin.DropDatabaseRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_drop_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DropDatabaseRequest( - database="database_value", - ) - - # Make the request - client.drop_database(request=request) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest, dict]): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - database (str): - Required. The database to be dropped. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
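As a complement to the generated ``update_database_ddl`` sample above: a sketch using the flattened arguments. The operation's response type is ``Empty``, so ``result()`` returns nothing useful on success and raises if a statement fails (identifiers are placeholders):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()

    operation = client.update_database_ddl(
        database="projects/my-project/instances/my-instance/databases/my-db",  # placeholder
        statements=[
            "CREATE TABLE Singers ("
            "  SingerId INT64 NOT NULL,"
            "  FirstName STRING(1024)"
            ") PRIMARY KEY (SingerId)",
        ],
    )
    operation.result()  # raises on the first failed statement

Statements are applied in order; per the docstring above, a failure cancels the rest of the batch, and progress can be monitored through the Operations API via ``operation_id``.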
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.DropDatabaseRequest): - request = spanner_database_admin.DropDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.drop_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_database_ddl(self, - request: Optional[Union[spanner_database_admin.GetDatabaseDdlRequest, dict]] = None, - *, - database: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - r"""Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_get_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.GetDatabaseDdlRequest( - database="database_value", - ) - - # Make the request - response = client.get_database_ddl(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest, dict]): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - database (str): - Required. The database whose schema we wish to get. 
- Values are of the form - ``projects//instances//databases/`` - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.GetDatabaseDdlRequest): - request = spanner_database_admin.GetDatabaseDdlRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database_ddl] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
-
- **JSON example:**
-
- ::
-
-     {
-       "bindings": [
-         {
-           "role": "roles/resourcemanager.organizationAdmin",
-           "members": [
-             "user:mike@example.com",
-             "group:admins@example.com",
-             "domain:google.com",
-             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-           ]
-         },
-         {
-           "role": "roles/resourcemanager.organizationViewer",
-           "members": [
-             "user:eve@example.com"
-           ],
-           "condition": {
-             "title": "expirable access",
-             "description": "Does not grant access after Sep 2020",
-             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-           }
-         }
-       ],
-       "etag": "BwWWja0YfJA=",
-       "version": 3
-     }
-
- **YAML example:**
-
- ::
-
-     bindings:
-     - members:
-       - user:mike@example.com
-       - group:admins@example.com
-       - domain:google.com
-       - serviceAccount:my-project-id@appspot.gserviceaccount.com
-       role: roles/resourcemanager.organizationAdmin
-     - members:
-       - user:eve@example.com
-       role: roles/resourcemanager.organizationViewer
-       condition:
-         title: expirable access
-         description: Does not grant access after Sep 2020
-         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-     etag: BwWWja0YfJA=
-     version: 3
-
- For a description of IAM and its features, see the
- [IAM
- documentation](https://cloud.google.com/iam/docs/).
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [resource]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- if isinstance(request, dict):
- # - The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- request = iam_policy_pb2.SetIamPolicyRequest(**request)
- elif not request:
- # Null request, just make one.
- request = iam_policy_pb2.SetIamPolicyRequest()
- if resource is not None:
- request.resource = resource
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("resource", request.resource),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def get_iam_policy(self,
- request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
- *,
- resource: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> policy_pb2.Policy:
- r"""Gets the access control policy for a database or backup
- resource. Returns an empty policy if a database or backup exists
- but does not have a policy set.
-
- Authorization requires ``spanner.databases.getIamPolicy``
- permission on
- [resource][google.iam.v1.GetIamPolicyRequest.resource]. For
- backups, authorization requires ``spanner.backups.getIamPolicy``
- permission on
- [resource][google.iam.v1.GetIamPolicyRequest.resource].
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
-
- **JSON example:**
-
- ::
-
-     {
-       "bindings": [
-         {
-           "role": "roles/resourcemanager.organizationAdmin",
-           "members": [
-             "user:mike@example.com",
-             "group:admins@example.com",
-             "domain:google.com",
-             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-           ]
-         },
-         {
-           "role": "roles/resourcemanager.organizationViewer",
-           "members": [
-             "user:eve@example.com"
-           ],
-           "condition": {
-             "title": "expirable access",
-             "description": "Does not grant access after Sep 2020",
-             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-           }
-         }
-       ],
-       "etag": "BwWWja0YfJA=",
-       "version": 3
-     }
-
- **YAML example:**
-
- ::
-
-     bindings:
-     - members:
-       - user:mike@example.com
-       - group:admins@example.com
-       - domain:google.com
-       - serviceAccount:my-project-id@appspot.gserviceaccount.com
-       role: roles/resourcemanager.organizationAdmin
-     - members:
-       - user:eve@example.com
-       role: roles/resourcemanager.organizationViewer
-       condition:
-         title: expirable access
-         description: Does not grant access after Sep 2020
-         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-     etag: BwWWja0YfJA=
-     version: 3
-
- For a description of IAM and its features, see the
- [IAM
- documentation](https://cloud.google.com/iam/docs/).
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [resource]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- if isinstance(request, dict):
- # - The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- request = iam_policy_pb2.GetIamPolicyRequest(**request)
- elif not request:
- # Null request, just make one.
- request = iam_policy_pb2.GetIamPolicyRequest()
- if resource is not None:
- request.resource = resource
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("resource", request.resource),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def test_iam_permissions(self,
- request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
- *,
- resource: Optional[str] = None,
- permissions: Optional[MutableSequence[str]] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> iam_policy_pb2.TestIamPermissionsResponse:
- r"""Returns permissions that the caller has on the specified
- database or backup resource.
-
- Attempting this RPC on a non-existent Cloud Spanner database
- will result in a NOT_FOUND error if the user has
- ``spanner.databases.list`` permission on the containing Cloud
- Spanner instance. Otherwise returns an empty set of permissions.
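Taken together, ``set_iam_policy`` and ``get_iam_policy`` above support the usual read-modify-write cycle, with the policy ``etag`` guarding against concurrent edits. A minimal sketch (resource name, role, and principal are placeholders):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    client = spanner_admin_database_v1.DatabaseAdminClient()
    resource = "projects/my-project/instances/my-instance/databases/my-db"  # placeholder

    # Read the current policy, append a binding, and write it back.
    policy = client.get_iam_policy(resource=resource)
    policy.bindings.append(policy_pb2.Binding(
        role="roles/spanner.databaseReader",  # assumed role, for illustration
        members=["user:eve@example.com"],     # placeholder principal
    ))
    # Carrying the etag forward makes the write fail if the policy
    # changed since it was read.
    updated = client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy),
    )
    print(updated.etag)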
- Calling this method on a backup that does not exist will result
- in a NOT_FOUND error if the user has ``spanner.backups.list``
- permission on the containing instance.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
- from google.iam.v1 import iam_policy_pb2 # type: ignore
-
- def sample_test_iam_permissions():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = iam_policy_pb2.TestIamPermissionsRequest(
- resource="resource_value",
- permissions=['permissions_value1', 'permissions_value2'],
- )
-
- # Make the request
- response = client.test_iam_permissions(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
- The request object. Request message for ``TestIamPermissions`` method.
- resource (str):
- REQUIRED: The resource for which the
- policy detail is being requested. See
- the operation documentation for the
- appropriate value for this field.
-
- This corresponds to the ``resource`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- permissions (MutableSequence[str]):
- The set of permissions to check for the ``resource``.
- Permissions with wildcards (such as '*' or 'storage.*')
- are not allowed. For more information see `IAM
- Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
-
- This corresponds to the ``permissions`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
- Response message for TestIamPermissions method.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [resource, permissions]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- if isinstance(request, dict):
- # - The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- request = iam_policy_pb2.TestIamPermissionsRequest(**request)
- elif not request:
- # Null request, just make one.
- request = iam_policy_pb2.TestIamPermissionsRequest()
- if resource is not None:
- request.resource = resource
- if permissions:
- request.permissions.extend(permissions)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_backup(self, - request: Optional[Union[gsad_backup.CreateBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup: Optional[gsad_backup.Backup] = None, - backup_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_create_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CreateBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - ) - - # Make the request - operation = client.create_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupRequest, dict]): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - parent (str): - Required. The name of the instance in which the backup - will be created. This must be the same instance that - contains the database the backup will be created from. - The backup will be stored in the location(s) specified - in the instance configuration of this instance. Values - are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to create. 
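Returning to ``test_iam_permissions``, wrapped above: the response echoes back only the subset of the requested permissions that the caller actually holds, so an empty list means no access (or a resource hidden from the caller). A sketch (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()

    response = client.test_iam_permissions(
        resource="projects/my-project/instances/my-instance/databases/my-db",  # placeholder
        permissions=["spanner.databases.read", "spanner.databases.write"],
    )
    granted = set(response.permissions)  # subset of the permissions asked about
    print("can write:", "spanner.databases.write" in granted)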
- This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (str): - Required. The id of the backup to be created. The - ``backup_id`` appended to ``parent`` forms the full - backup name of the form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup, backup_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup.CreateBackupRequest): - request = gsad_backup.CreateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup is not None: - request.backup = backup - if backup_id is not None: - request.backup_id = backup_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gsad_backup.Backup, - metadata_type=gsad_backup.CreateBackupMetadata, - ) - - # Done; return the response. - return response - - def copy_backup(self, - request: Optional[Union[backup.CopyBackupRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_id: Optional[str] = None, - source_backup: Optional[str] = None, - expire_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Starts copying a Cloud Spanner Backup. 
The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track copying of the backup. The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_copy_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.CopyBackupRequest( - parent="parent_value", - backup_id="backup_id_value", - source_backup="source_backup_value", - ) - - # Make the request - operation = client.copy_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.CopyBackupRequest, dict]): - The request object. The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - parent (str): - Required. The name of the destination instance that will - contain the backup copy. Values are of the form: - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_id (str): - Required. The id of the backup copy. The ``backup_id`` - appended to ``parent`` forms the full backup_uri of the - form - ``projects//instances//backups/``. - - This corresponds to the ``backup_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source_backup (str): - Required. The source backup to be copied. The source - backup needs to be in READY state for it to be copied. - Once CopyBackup is in progress, the source backup cannot - be deleted or cleaned up on expiration until CopyBackup - is finished. Values are of the form: - ``projects//instances//backups/``. - - This corresponds to the ``source_backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The expiration time of the backup in - microsecond granularity. The expiration time must be at - least 6 hours and at most 366 days from the - ``create_time`` of the source backup. Once the - ``expire_time`` has passed, the backup is eligible to be - automatically deleted by Cloud Spanner to free the - resources used by the backup. - - This corresponds to the ``expire_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_database_v1.types.Backup` - A backup of a Cloud Spanner database. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_id, source_backup, expire_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.CopyBackupRequest): - request = backup.CopyBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_id is not None: - request.backup_id = backup_id - if source_backup is not None: - request.source_backup = source_backup - if expire_time is not None: - request.expire_time = expire_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.copy_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backup.Backup, - metadata_type=backup.CopyBackupMetadata, - ) - - # Done; return the response. - return response - - def get_backup(self, - request: Optional[Union[backup.GetBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backup.Backup: - r"""Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
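Looking back at ``copy_backup``, whose implementation concludes above: ``expire_time`` must land between 6 hours and 366 days after the source backup's ``create_time``. A sketch with an explicit expiration (all names are placeholders):

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import timestamp_pb2

    client = spanner_admin_database_v1.DatabaseAdminClient()

    # Choose an expiration inside the allowed 6-hour-to-366-day window.
    expire_time = timestamp_pb2.Timestamp()
    expire_time.FromDatetime(datetime.now(timezone.utc) + timedelta(days=7))

    operation = client.copy_backup(
        parent="projects/my-project/instances/my-instance",  # placeholder
        backup_id="my-backup-copy",                          # placeholder
        source_backup="projects/my-project/instances/my-instance/backups/my-backup",  # placeholder
        expire_time=expire_time,
    )
    backup = operation.result()  # blocks until the copy completes
    print(backup.state)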
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_get_backup():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.GetBackupRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_backup(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupRequest, dict]):
- The request object. The request for
- [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup].
- name (str):
- Required. Name of the backup. Values are of the form
- ``projects/<project>/instances/<instance>/backups/<backup>``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.types.Backup:
- A backup of a Cloud Spanner database.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, backup.GetBackupRequest):
- request = backup.GetBackupRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_backup]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def update_backup(self,
- request: Optional[Union[gsad_backup.UpdateBackupRequest, dict]] = None,
- *,
- backup: Optional[gsad_backup.Backup] = None,
- update_mask: Optional[field_mask_pb2.FieldMask] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> gsad_backup.Backup:
- r"""Updates a pending or completed
- [Backup][google.spanner.admin.database.v1.Backup].
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_update_backup():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.UpdateBackupRequest(
- )
-
- # Make the request
- response = client.update_backup(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest, dict]):
- The request object. The request for
- [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup].
- backup (google.cloud.spanner_admin_database_v1.types.Backup):
- Required. The backup to update. ``backup.name``, and the
- fields to be updated as specified by ``update_mask`` are
- required. Other fields are ignored. Update is only
- supported for the following fields:
-
- - ``backup.expire_time``.
-
- This corresponds to the ``backup`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- update_mask (google.protobuf.field_mask_pb2.FieldMask):
- Required. A mask specifying which fields (e.g.
- ``expire_time``) in the Backup resource should be
- updated. This mask is relative to the Backup resource,
- not to the request message. The field mask must always
- be specified; this prevents any future fields from being
- erased accidentally by clients that do not know about
- them.
-
- This corresponds to the ``update_mask`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.types.Backup:
- A backup of a Cloud Spanner database.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [backup, update_mask]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, gsad_backup.UpdateBackupRequest):
- request = gsad_backup.UpdateBackupRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup(self, - request: Optional[Union[backup.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_delete_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - client.delete_backup(request=request) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest, dict]): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - name (str): - Required. Name of the backup to delete. Values are of - the form - ``projects//instances//backups/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
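Since the generated ``sample_update_backup`` above leaves the request empty, a fuller sketch may help: ``backup.name`` and an ``update_mask`` naming ``expire_time`` are both required (resource names are placeholders):

.. code-block:: python

    from datetime import datetime, timedelta, timezone

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import field_mask_pb2, timestamp_pb2

    client = spanner_admin_database_v1.DatabaseAdminClient()

    new_expiry = timestamp_pb2.Timestamp()
    new_expiry.FromDatetime(datetime.now(timezone.utc) + timedelta(days=30))

    backup = spanner_admin_database_v1.Backup(
        name="projects/my-project/instances/my-instance/backups/my-backup",  # placeholder
        expire_time=new_expiry,
    )
    updated = client.update_backup(
        backup=backup,
        update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
    )
    print(updated.expire_time)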
- if not isinstance(request, backup.DeleteBackupRequest): - request = backup.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_backups(self, - request: Optional[Union[backup.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupsPager: - r"""Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupsRequest, dict]): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - parent (str): - Required. The instance to list backups from. Values are - of the form ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup.ListBackupsRequest): - request = backup.ListBackupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def restore_database(self, - request: Optional[Union[spanner_database_admin.RestoreDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database_id: Optional[str] = None, - backup: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
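As a postscript to ``list_backups``, wrapped above: the pager resolves additional pages lazily during iteration, and the request's ``filter`` field can narrow the results (identifiers are placeholders, and the filter expression assumes the ListBackups filter syntax):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()

    request = spanner_admin_database_v1.ListBackupsRequest(
        parent="projects/my-project/instances/my-instance",  # placeholder
        filter='expire_time < "2030-01-01T00:00:00Z"',       # assumed filter expression
    )
    for backup in client.list_backups(request=request):  # pages fetched as needed
        print(backup.name, backup.state)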
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_restore_database():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.RestoreDatabaseRequest(
- backup="backup_value",
- parent="parent_value",
- database_id="database_id_value",
- )
-
- # Make the request
- operation = client.restore_database(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest, dict]):
- The request object. The request for
- [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase].
- parent (str):
- Required. The name of the instance in which to create
- the restored database. This instance must be in the same
- project and have the same instance configuration as the
- instance containing the source backup. Values are of the
- form ``projects/<project>/instances/<instance>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- database_id (str):
- Required. The id of the database to create and restore
- to. This database must not already exist. The
- ``database_id`` appended to ``parent`` forms the full
- database name of the form
- ``projects/<project>/instances/<instance>/databases/<database_id>``.
-
- This corresponds to the ``database_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- backup (str):
- Name of the backup from which to restore. Values are of
- the form
- ``projects/<project>/instances/<instance>/backups/<backup>``.
-
- This corresponds to the ``backup`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be
- :class:`google.cloud.spanner_admin_database_v1.types.Database`
- A Cloud Spanner database.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, database_id, backup]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_database_admin.RestoreDatabaseRequest):
- request = spanner_database_admin.RestoreDatabaseRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
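- # Illustrative usage (a sketch; the resource names are assumed): the
- # flattened arguments below cover the common case, and the returned
- # future blocks until the restore finishes:
- #   op = client.restore_database(
- #       parent="projects/p/instances/i",
- #       database_id="restored-db",
- #       backup="projects/p/instances/i/backups/b",
- #   )
- #   database = op.result()  # waits for the long-running restore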
- if parent is not None:
- request.parent = parent
- if database_id is not None:
- request.database_id = database_id
- if backup is not None:
- request.backup = backup
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.restore_database]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- spanner_database_admin.Database,
- metadata_type=spanner_database_admin.RestoreDatabaseMetadata,
- )
-
- # Done; return the response.
- return response
-
- def list_database_operations(self,
- request: Optional[Union[spanner_database_admin.ListDatabaseOperationsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListDatabaseOperationsPager:
- r"""Lists database
- [longrunning-operations][google.longrunning.Operation]. A
- database operation has a name of the form
- ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``.
- The long-running operation
- [metadata][google.longrunning.Operation.metadata] field type
- ``metadata.type_url`` describes the type of the metadata.
- Operations returned include those that have
- completed/failed/canceled within the last 7 days, and pending
- operations.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_list_database_operations():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.ListDatabaseOperationsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_database_operations(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest, dict]):
- The request object. The request for
- [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations].
- parent (str):
- Required. The instance of the database operations.
- Values are of the form
- ``projects/<project>/instances/<instance>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata.
Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager:
- The response for
- [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_database_admin.ListDatabaseOperationsRequest):
- request = spanner_database_admin.ListDatabaseOperationsRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_database_operations]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListDatabaseOperationsPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def list_backup_operations(self,
- request: Optional[Union[backup.ListBackupOperationsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListBackupOperationsPager:
- r"""Lists the backup [long-running
- operations][google.longrunning.Operation] in the given instance.
- A backup operation has a name of the form
- ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation>``.
- The long-running operation
- [metadata][google.longrunning.Operation.metadata] field type
- ``metadata.type_url`` describes the type of the metadata.
- Operations returned include those that have
- completed/failed/canceled within the last 7 days, and pending
- operations. Operations returned are ordered by
- ``operation.metadata.value.progress.start_time`` in descending
- order starting from the most recently started operation.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_list_backup_operations():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.ListBackupOperationsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_backup_operations(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest, dict]):
- The request object. The request for
- [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
- parent (str):
- Required. The instance of the backup operations. Values
- are of the form
- ``projects/<project>/instances/<instance>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager:
- The response for
- [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, backup.ListBackupOperationsRequest):
- request = backup.ListBackupOperationsRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_backup_operations]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
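- # Illustrative usage (a sketch; the parent name is assumed): iterating
- # the returned pager yields operations_pb2.Operation messages and fetches
- # further pages lazily:
- #   for op in client.list_backup_operations(parent="projects/p/instances/i"):
- #       print(op.name)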
- response = pagers.ListBackupOperationsPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def list_database_roles(self,
- request: Optional[Union[spanner_database_admin.ListDatabaseRolesRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListDatabaseRolesPager:
- r"""Lists Cloud Spanner database roles.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_list_database_roles():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.ListDatabaseRolesRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_database_roles(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest, dict]):
- The request object. The request for
- [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles].
- parent (str):
- Required. The database whose roles should be listed.
- Values are of the form
- ``projects/<project>/instances/<instance>/databases/<database>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager:
- The response for
- [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_database_admin.ListDatabaseRolesRequest):
- request = spanner_database_admin.ListDatabaseRolesRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_database_roles]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListDatabaseRolesPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def add_split_points(self,
- request: Optional[Union[spanner_database_admin.AddSplitPointsRequest, dict]] = None,
- *,
- database: Optional[str] = None,
- split_points: Optional[MutableSequence[spanner_database_admin.SplitPoints]] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> spanner_database_admin.AddSplitPointsResponse:
- r"""Adds split points to specified tables, indexes of a
- database.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_add_split_points():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.AddSplitPointsRequest(
- database="database_value",
- )
-
- # Make the request
- response = client.add_split_points(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest, dict]):
- The request object. The request for
- [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints].
- database (str):
- Required. The database on whose tables/indexes split
- points are to be added. Values are of the form
- ``projects/<project>/instances/<instance>/databases/<database>``.
-
- This corresponds to the ``database`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]):
- Required. The split points to add.
- This corresponds to the ``split_points`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse: - The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, split_points] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.AddSplitPointsRequest): - request = spanner_database_admin.AddSplitPointsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if split_points is not None: - request.split_points = split_points - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.add_split_points] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database", request.database), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.CreateBackupScheduleRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - backup_schedule_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Creates a new backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_create_backup_schedule():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.CreateBackupScheduleRequest(
- parent="parent_value",
- backup_schedule_id="backup_schedule_id_value",
- )
-
- # Make the request
- response = client.create_backup_schedule(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest, dict]):
- The request object. The request for
- [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule].
- parent (str):
- Required. The name of the database
- that this backup schedule applies to.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule):
- Required. The backup schedule to
- create.
-
- This corresponds to the ``backup_schedule`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- backup_schedule_id (str):
- Required. The Id to use for the backup schedule. The
- ``backup_schedule_id`` appended to ``parent`` forms the
- full backup schedule name of the form
- ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
-
- This corresponds to the ``backup_schedule_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.types.BackupSchedule:
- BackupSchedule expresses the
- automated backup creation specification
- for a Spanner database. Next ID: 10
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, backup_schedule, backup_schedule_id]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, gsad_backup_schedule.CreateBackupScheduleRequest):
- request = gsad_backup_schedule.CreateBackupScheduleRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
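- # Illustrative usage (a sketch; names and cron text are assumed): a
- # minimal schedule can be built from the generated types and passed as a
- # flattened argument:
- #   schedule = spanner_admin_database_v1.BackupSchedule(
- #       spec=spanner_admin_database_v1.BackupScheduleSpec(
- #           cron_spec=spanner_admin_database_v1.CrontabSpec(text="0 2 * * *"),
- #       ),
- #   )
- #   client.create_backup_schedule(
- #       parent="projects/p/instances/i/databases/d",
- #       backup_schedule=schedule,
- #       backup_schedule_id="nightly",
- #   )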
- if parent is not None:
- request.parent = parent
- if backup_schedule is not None:
- request.backup_schedule = backup_schedule
- if backup_schedule_id is not None:
- request.backup_schedule_id = backup_schedule_id
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.create_backup_schedule]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def get_backup_schedule(self,
- request: Optional[Union[backup_schedule.GetBackupScheduleRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> backup_schedule.BackupSchedule:
- r"""Gets backup schedule for the input schedule name.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_get_backup_schedule():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.GetBackupScheduleRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_backup_schedule(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest, dict]):
- The request object. The request for
- [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule].
- name (str):
- Required. The name of the schedule to retrieve. Values
- are of the form
- ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.types.BackupSchedule:
- BackupSchedule expresses the
- automated backup creation specification
- for a Spanner database. Next ID: 10
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
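- # Illustrative usage (a sketch; the full schedule name is assumed):
- #   client.get_backup_schedule(
- #       name="projects/p/instances/i/databases/d/backupSchedules/nightly")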
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backup_schedule.GetBackupScheduleRequest): - request = backup_schedule.GetBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_backup_schedule(self, - request: Optional[Union[gsad_backup_schedule.UpdateBackupScheduleRequest, dict]] = None, - *, - backup_schedule: Optional[gsad_backup_schedule.BackupSchedule] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Updates a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = client.update_backup_schedule(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest, dict]): - The request object. The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): - Required. The backup schedule to update. - ``backup_schedule.name``, and the fields to be updated - as specified by ``update_mask`` are required. Other - fields are ignored. - - This corresponds to the ``backup_schedule`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which - fields in the BackupSchedule resource - should be updated. This mask is relative - to the BackupSchedule resource, not to - the request message. 
The field mask must - always be specified; this prevents any - future fields from being erased - accidentally. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup_schedule, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gsad_backup_schedule.UpdateBackupScheduleRequest): - request = gsad_backup_schedule.UpdateBackupScheduleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup_schedule is not None: - request.backup_schedule = backup_schedule - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup_schedule] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup_schedule.name", request.backup_schedule.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup_schedule(self, - request: Optional[Union[backup_schedule.DeleteBackupScheduleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a backup schedule. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_delete_backup_schedule():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
- name="name_value",
- )
-
- # Make the request
- client.delete_backup_schedule(request=request)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest, dict]):
- The request object. The request for
- [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule].
- name (str):
- Required. The name of the schedule to delete. Values are
- of the form
- ``projects/<project>/instances/<instance>/databases/<database>/backupSchedules/<backup_schedule_id>``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, backup_schedule.DeleteBackupScheduleRequest):
- request = backup_schedule.DeleteBackupScheduleRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.delete_backup_schedule]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- def list_backup_schedules(self,
- request: Optional[Union[backup_schedule.ListBackupSchedulesRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListBackupSchedulesPager:
- r"""Lists all the backup schedules for the database.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_database_v1
-
- def sample_list_backup_schedules():
- # Create a client
- client = spanner_admin_database_v1.DatabaseAdminClient()
-
- # Initialize request argument(s)
- request = spanner_admin_database_v1.ListBackupSchedulesRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_backup_schedules(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest, dict]):
- The request object. The request for
- [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
- parent (str):
- Required. Database is the parent
- resource whose backup schedules should
- be listed. Values are of the form
- projects/<project>/instances/<instance>/databases/<database>
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager:
- The response for
- [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, backup_schedule.ListBackupSchedulesRequest):
- request = backup_schedule.ListBackupSchedulesRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_backup_schedules]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
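- # The call below returns only the first page; the pager built afterwards
- # resolves the rest on demand. Illustrative usage (a sketch; the database
- # name is assumed):
- #   pager = client.list_backup_schedules(parent="projects/p/instances/i/databases/d")
- #   for page in pager.pages:  # page-by-page instead of item-by-item
- #       print(len(page.backup_schedules))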
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupSchedulesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def internal_update_graph_operation(self, - request: Optional[Union[spanner_database_admin.InternalUpdateGraphOperationRequest, dict]] = None, - *, - database: Optional[str] = None, - operation_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: - r"""This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_database_v1 - - def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest, dict]): - The request object. Internal request proto, do not use - directly. - database (str): - Internal field, do not use directly. - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - operation_id (str): - Internal field, do not use directly. - This corresponds to the ``operation_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse: - Internal response proto, do not use - directly. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [database, operation_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_database_admin.InternalUpdateGraphOperationRequest): - request = spanner_database_admin.InternalUpdateGraphOperationRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if operation_id is not None: - request.operation_id = operation_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.internal_update_graph_operation] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DatabaseAdminClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
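- # Illustrative override (a sketch; the operation name is assumed): callers
- # may replace the default retry and timeout for this call:
- #   from google.api_core import retry as retries
- #   client.delete_operation(
- #       {"name": "projects/p/instances/i/databases/d/operations/op-id"},
- #       retry=retries.Retry(initial=1.0, maximum=10.0),
- #       timeout=30.0,
- #   )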
- rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "DatabaseAdminClient", -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py deleted file mode 100644 index 5a5b92e7d1..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/pagers.py +++ /dev/null @@ -1,864 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.longrunning import operations_pb2 # type: ignore - - -class ListDatabasesPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabasesResponse], - request: spanner_database_admin.ListDatabasesRequest, - response: spanner_database_admin.ListDatabasesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_database_admin.Database]: - for page in self.pages: - yield from page.databases - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabasesAsyncPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabasesResponse]], - request: spanner_database_admin.ListDatabasesRequest, - response: spanner_database_admin.ListDatabasesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_database_admin.Database]: - async def async_generator(): - async for page in self.pages: - for response in page.databases: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backup.ListBackupsResponse], - request: backup.ListBackupsRequest, - response: backup.ListBackupsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backup.Backup]: - for page in self.pages: - yield from page.backups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsAsyncPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup.ListBackupsResponse]], - request: backup.ListBackupsRequest, - response: backup.ListBackupsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backup.Backup]: - async def async_generator(): - async for page in self.pages: - for response in page.backups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseOperationsPager: - """A pager for iterating through ``list_database_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabaseOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabaseOperationsResponse], - request: spanner_database_admin.ListDatabaseOperationsRequest, - response: spanner_database_admin.ListDatabaseOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabaseOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseOperationsAsyncPager: - """A pager for iterating through ``list_database_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabaseOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]], - request: spanner_database_admin.ListDatabaseOperationsRequest, - response: spanner_database_admin.ListDatabaseOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupOperationsPager: - """A pager for iterating through ``list_backup_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backup.ListBackupOperationsResponse], - request: backup.ListBackupOperationsRequest, - response: backup.ListBackupOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup.ListBackupOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupOperationsAsyncPager: - """A pager for iterating through ``list_backup_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup.ListBackupOperationsResponse]], - request: backup.ListBackupOperationsRequest, - response: backup.ListBackupOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup.ListBackupOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup.ListBackupOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseRolesPager: - """A pager for iterating through ``list_database_roles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``database_roles`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabaseRoles`` requests and continue to iterate - through the ``database_roles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_database_admin.ListDatabaseRolesResponse], - request: spanner_database_admin.ListDatabaseRolesRequest, - response: spanner_database_admin.ListDatabaseRolesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseRolesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_database_admin.ListDatabaseRolesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_database_admin.DatabaseRole]: - for page in self.pages: - yield from page.database_roles - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabaseRolesAsyncPager: - """A pager for iterating through ``list_database_roles`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``database_roles`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabaseRoles`` requests and continue to iterate - through the ``database_roles`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_database_admin.ListDatabaseRolesResponse]], - request: spanner_database_admin.ListDatabaseRolesRequest, - response: spanner_database_admin.ListDatabaseRolesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_database_admin.ListDatabaseRolesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_database_admin.ListDatabaseRolesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_database_admin.DatabaseRole]: - async def async_generator(): - async for page in self.pages: - for response in page.database_roles: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupSchedulesPager: - """A pager for iterating through ``list_backup_schedules`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backup_schedules`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupSchedules`` requests and continue to iterate - through the ``backup_schedules`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backup_schedule.ListBackupSchedulesResponse], - request: backup_schedule.ListBackupSchedulesRequest, - response: backup_schedule.ListBackupSchedulesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup_schedule.ListBackupSchedulesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backup_schedule.ListBackupSchedulesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backup_schedule.BackupSchedule]: - for page in self.pages: - yield from page.backup_schedules - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupSchedulesAsyncPager: - """A pager for iterating through ``list_backup_schedules`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backup_schedules`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupSchedules`` requests and continue to iterate - through the ``backup_schedules`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backup_schedule.ListBackupSchedulesResponse]], - request: backup_schedule.ListBackupSchedulesRequest, - response: backup_schedule.ListBackupSchedulesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest): - The initial request object. - response (google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backup_schedule.ListBackupSchedulesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backup_schedule.ListBackupSchedulesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backup_schedule.BackupSchedule]: - async def async_generator(): - async for page in self.pages: - for response in page.backup_schedules: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst deleted file mode 100644 index f70c023a98..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DatabaseAdminTransport` is the ABC for all transports. -- public child `DatabaseAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DatabaseAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDatabaseAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DatabaseAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py deleted file mode 100644 index 975834b54d..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DatabaseAdminTransport -from .grpc import DatabaseAdminGrpcTransport -from .grpc_asyncio import DatabaseAdminGrpcAsyncIOTransport -from .rest import DatabaseAdminRestTransport -from .rest import DatabaseAdminRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DatabaseAdminTransport]] -_transport_registry['grpc'] = DatabaseAdminGrpcTransport -_transport_registry['grpc_asyncio'] = DatabaseAdminGrpcAsyncIOTransport -_transport_registry['rest'] = DatabaseAdminRestTransport - -__all__ = ( - 'DatabaseAdminTransport', - 'DatabaseAdminGrpcTransport', - 'DatabaseAdminGrpcAsyncIOTransport', - 'DatabaseAdminRestTransport', - 'DatabaseAdminRestInterceptor', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py deleted file mode 100644 index 7d801501d3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py +++ /dev/null @@ -1,795 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
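# [Editorial example, not part of the deleted hunk] The registry above is
# what the client consults when a transport is named by string; a hedged
# sketch of the two supported ways to select one (a registry key, or a
# concrete transport instance). Credentials are resolved from the
# environment (Application Default Credentials), and the host shown is the
# service default.
from google.cloud import spanner_admin_database_v1
from google.cloud.spanner_admin_database_v1.services.database_admin.transports import (
    DatabaseAdminGrpcTransport,
)

rest_client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
grpc_client = spanner_admin_database_v1.DatabaseAdminClient(
    transport=DatabaseAdminGrpcTransport(host="spanner.googleapis.com")
)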
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.spanner_admin_database_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DatabaseAdminTransport(abc.ABC): - """Abstract transport class for DatabaseAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. 
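# [Editorial example, not part of the deleted hunk] A sketch of the
# credential-argument contract documented above: `credentials` and the
# deprecated `credentials_file` are mutually exclusive, and passing both
# fails fast before any credential is loaded. The key path is hypothetical,
# and anonymous credentials stand in for real ones.
from google.api_core import exceptions as core_exceptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.spanner_admin_database_v1.services.database_admin.transports import (
    DatabaseAdminGrpcTransport,
)

try:
    DatabaseAdminGrpcTransport(
        credentials=AnonymousCredentials(),
        credentials_file="/path/to/key.json",  # deprecated argument
    )
except core_exceptions.DuplicateCredentialArgs:
    print("credentials and credentials_file are mutually exclusive")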
- self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_databases: gapic_v1.method.wrap_method( - self.list_databases, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_database: gapic_v1.method.wrap_method( - self.create_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database: gapic_v1.method.wrap_method( - self.get_database, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database: gapic_v1.method.wrap_method( - self.update_database, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database_ddl: gapic_v1.method.wrap_method( - self.update_database_ddl, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.drop_database: gapic_v1.method.wrap_method( - self.drop_database, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database_ddl: gapic_v1.method.wrap_method( - self.get_database_ddl, - 
default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.create_backup: gapic_v1.method.wrap_method( - self.create_backup, - default_timeout=3600.0, - client_info=client_info, - ), - self.copy_backup: gapic_v1.method.wrap_method( - self.copy_backup, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup: gapic_v1.method.wrap_method( - self.get_backup, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup: gapic_v1.method.wrap_method( - self.update_backup, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup: gapic_v1.method.wrap_method( - self.delete_backup, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backups: gapic_v1.method.wrap_method( - self.list_backups, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.restore_database: gapic_v1.method.wrap_method( - self.restore_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_operations: gapic_v1.method.wrap_method( - self.list_database_operations, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_operations: gapic_v1.method.wrap_method( - self.list_backup_operations, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_roles: gapic_v1.method.wrap_method( - self.list_database_roles, - 
default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.add_split_points: gapic_v1.method.wrap_method( - self.add_split_points, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_backup_schedule: gapic_v1.method.wrap_method( - self.create_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup_schedule: gapic_v1.method.wrap_method( - self.get_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup_schedule: gapic_v1.method.wrap_method( - self.update_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup_schedule: gapic_v1.method.wrap_method( - self.delete_backup_schedule, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_schedules: gapic_v1.method.wrap_method( - self.list_backup_schedules, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.internal_update_graph_operation: gapic_v1.method.wrap_method( - self.internal_update_graph_operation, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
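# [Editorial example, not part of the deleted hunk] The wrap_method tables
# above give most admin RPCs an exponential-backoff retry: 1.0s initial
# delay, x1.3 multiplier, 32s cap, 3600s overall deadline, retrying
# DeadlineExceeded and ServiceUnavailable. Those defaults can be overridden
# per call; a hedged sketch with placeholder resource names:
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import spanner_admin_database_v1

custom_retry = retries.Retry(
    initial=0.5,      # first backoff, in seconds
    maximum=10.0,     # backoff ceiling
    multiplier=2.0,   # backoff growth factor
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    deadline=120.0,   # give up after two minutes overall
)
client = spanner_admin_database_v1.DatabaseAdminClient()
client.list_databases(
    parent="projects/my-project/instances/my-instance",
    retry=custom_retry,
)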
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - Union[ - spanner_database_admin.ListDatabasesResponse, - Awaitable[spanner_database_admin.ListDatabasesResponse] - ]]: - raise NotImplementedError() - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - Union[ - spanner_database_admin.Database, - Awaitable[spanner_database_admin.Database] - ]]: - raise NotImplementedError() - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - Union[ - spanner_database_admin.GetDatabaseDdlResponse, - Awaitable[spanner_database_admin.GetDatabaseDdlResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - Union[ - backup.Backup, - Awaitable[backup.Backup] - ]]: - raise NotImplementedError() - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - Union[ - gsad_backup.Backup, - Awaitable[gsad_backup.Backup] - ]]: - raise NotImplementedError() - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - Union[ - backup.ListBackupsResponse, - Awaitable[backup.ListBackupsResponse] - ]]: - raise NotImplementedError() - - @property - def 
restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - Union[ - spanner_database_admin.ListDatabaseOperationsResponse, - Awaitable[spanner_database_admin.ListDatabaseOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - Union[ - backup.ListBackupOperationsResponse, - Awaitable[backup.ListBackupOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - Union[ - spanner_database_admin.ListDatabaseRolesResponse, - Awaitable[spanner_database_admin.ListDatabaseRolesResponse] - ]]: - raise NotImplementedError() - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - Union[ - spanner_database_admin.AddSplitPointsResponse, - Awaitable[spanner_database_admin.AddSplitPointsResponse] - ]]: - raise NotImplementedError() - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - Union[ - gsad_backup_schedule.BackupSchedule, - Awaitable[gsad_backup_schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - Union[ - backup_schedule.BackupSchedule, - Awaitable[backup_schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - Union[ - gsad_backup_schedule.BackupSchedule, - Awaitable[gsad_backup_schedule.BackupSchedule] - ]]: - raise NotImplementedError() - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - Union[ - backup_schedule.ListBackupSchedulesResponse, - Awaitable[backup_schedule.ListBackupSchedulesResponse] - ]]: - raise NotImplementedError() - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - Union[ - spanner_database_admin.InternalUpdateGraphOperationResponse, - Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise 
NotImplementedError() - - -__all__ = ( - 'DatabaseAdminTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py deleted file mode 100644 index 8f548acc7c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py +++ /dev/null @@ -1,1285 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": 
dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC response metadata to a plain dict for logging - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DatabaseAdminGrpcTransport(DatabaseAdminTransport): - """gRPC backend transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library.
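Because ``credentials_file`` is deprecated, callers should load credentials themselves and pass the resulting object. A hedged sketch of the replacement pattern (the key-file path is hypothetical):

    import google.auth

    from google.cloud.spanner_admin_database_v1.services.database_admin.transports.grpc import (
        DatabaseAdminGrpcTransport,
    )

    # google.auth.load_credentials_from_file returns (credentials, project_id).
    credentials, _ = google.auth.load_credentials_from_file("service-account.json")
    transport = DatabaseAdminGrpcTransport(credentials=credentials)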
- scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
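The non-deprecated mTLS path is ``client_cert_source_for_mtls``: a callback returning PEM-encoded certificate and key bytes, from which the branch below builds ``grpc.ssl_channel_credentials``. A sketch, assuming cert.pem and key.pem exist locally:

    from google.cloud.spanner_admin_database_v1.services.database_admin.transports.grpc import (
        DatabaseAdminGrpcTransport,
    )

    def client_cert_source():
        # Return (certificate_chain, private_key), both PEM-encoded.
        with open("cert.pem", "rb") as cert, open("key.pem", "rb") as key:
            return cert.read(), key.read()

    transport = DatabaseAdminGrpcTransport(
        client_cert_source_for_mtls=client_cert_source,
    )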
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.
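As the docstrings above note, a caller-supplied channel wins over every credentials argument. An illustrative sketch of injecting one built by ``create_channel`` (Application Default Credentials assumed):

    import google.auth

    from google.cloud.spanner_admin_database_v1.services.database_admin.transports.grpc import (
        DatabaseAdminGrpcTransport,
    )

    credentials, _ = google.auth.default()
    channel = DatabaseAdminGrpcTransport.create_channel(
        "spanner.googleapis.com",
        credentials=credentials,
    )
    # A live Channel instance is used as-is; credentials arguments are ignored.
    transport = DatabaseAdminGrpcTransport(channel=channel)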
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - spanner_database_admin.ListDatabasesResponse]: - r"""Return a callable for the list databases method over gRPC. - - Lists Cloud Spanner databases. - - Returns: - Callable[[~.ListDatabasesRequest], - ~.ListDatabasesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', - request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the create database method over gRPC. - - Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.CreateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', - request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - spanner_database_admin.Database]: - r"""Return a callable for the get database method over gRPC. - - Gets the state of a Cloud Spanner database. - - Returns: - Callable[[~.GetDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', - request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, - response_deserializer=spanner_database_admin.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the update database method over gRPC. - - Updates a Cloud Spanner database. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the database. If the named database - does not exist, returns ``NOT_FOUND``. - - While the operation is pending: - - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field is set to true. - - Cancelling the operation is best-effort. If the cancellation - succeeds, the operation metadata's - [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] - is set, the updates are reverted, and the operation terminates - with a ``CANCELLED`` status. - - New UpdateDatabase requests will return a - ``FAILED_PRECONDITION`` error until the pending operation is - done (returns successfully or with error). - - Reading the database via the API continues to give the - pre-request values. - - Upon completion of the returned operation: - - - The new values are in effect and readable via the API. - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field becomes false. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>`` - and can be used to track the database modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.UpdateDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase', - request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database'] - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - operations_pb2.Operation]: - r"""Return a callable for the update database ddl method over gRPC. - - Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``<database_name>/operations/<operation_id>`` and can be used to - track execution of the schema change(s).
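A hedged sketch of driving this callable and polling the resulting long-running operation through ``operations_client`` (resource names and the DDL statement are hypothetical; Application Default Credentials assumed):

    from google.cloud.spanner_admin_database_v1.services.database_admin.transports.grpc import (
        DatabaseAdminGrpcTransport,
    )
    from google.cloud.spanner_admin_database_v1.types import spanner_database_admin

    transport = DatabaseAdminGrpcTransport()
    operation = transport.update_database_ddl(
        spanner_database_admin.UpdateDatabaseDdlRequest(
            database="projects/my-project/instances/my-instance/databases/my-db",
            statements=["CREATE TABLE t (id INT64) PRIMARY KEY (id)"],
        )
    )
    # The operation name can be polled until done=True.
    current = transport.operations_client.get_operation(operation.name)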
The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - Returns: - Callable[[~.UpdateDatabaseDdlRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database_ddl' not in self._stubs: - self._stubs['update_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', - request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database_ddl'] - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - empty_pb2.Empty]: - r"""Return a callable for the drop database method over gRPC. - - Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - Returns: - Callable[[~.DropDatabaseRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'drop_database' not in self._stubs: - self._stubs['drop_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', - request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['drop_database'] - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - spanner_database_admin.GetDatabaseDdlResponse]: - r"""Return a callable for the get database ddl method over gRPC. - - Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates; those may be queried using the - [Operations][google.longrunning.Operations] API. - - Returns: - Callable[[~.GetDatabaseDdlRequest], - ~.GetDatabaseDdlResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database_ddl' not in self._stubs: - self._stubs['get_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', - request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, - response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, - ) - return self._stubs['get_database_ddl'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on a database or backup resource. - Replaces any existing policy.
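Because a set replaces the whole policy, the usual pattern is read-modify-write. An illustrative sketch (resource name and principal are hypothetical; ``transport`` as constructed in the earlier sketches):

    from google.iam.v1 import iam_policy_pb2

    resource = "projects/my-project/instances/my-instance/databases/my-db"
    policy = transport.get_iam_policy(
        iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )
    # Append a binding, then write the whole policy back.
    policy.bindings.add(
        role="roles/spanner.databaseReader",
        members=["user:alice@example.com"],
    )
    transport.set_iam_policy(
        iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )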
- - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the create backup method over gRPC. - - Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - Returns: - Callable[[~.CreateBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup' not in self._stubs: - self._stubs['create_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', - request_serializer=gsad_backup.CreateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup'] - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the copy backup method over gRPC. - - Starts copying a Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects/<project>/instances/<instance>/backups/<backup>/operations/<operation_id>`` - and can be used to track copying of the backup. The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - Returns: - Callable[[~.CopyBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'copy_backup' not in self._stubs: - self._stubs['copy_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', - request_serializer=backup.CopyBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['copy_backup'] - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - backup.Backup]: - r"""Return a callable for the get backup method over gRPC. - - Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.GetBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', - request_serializer=backup.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - gsad_backup.Backup]: - r"""Return a callable for the update backup method over gRPC. - - Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.UpdateBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', - request_serializer=gsad_backup.UpdateBackupRequest.serialize, - response_deserializer=gsad_backup.Backup.deserialize, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.DeleteBackupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', - request_serializer=backup.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - backup.ListBackupsResponse]: - r"""Return a callable for the list backups method over gRPC. - - Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. 
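At the transport level there is no pager; each call returns a single page. A minimal sketch of the ``page_token`` loop that the higher-level client's pager implements (the parent name is hypothetical; ``transport`` as in the earlier sketches):

    from google.cloud.spanner_admin_database_v1.types import backup as backup_messages

    request = backup_messages.ListBackupsRequest(
        parent="projects/my-project/instances/my-instance",
    )
    while True:
        response = transport.list_backups(request)
        for item in response.backups:
            print(item.name)
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token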
- - Returns: - Callable[[~.ListBackupsRequest], - ~.ListBackupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', - request_serializer=backup.ListBackupsRequest.serialize, - response_deserializer=backup.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - operations_pb2.Operation]: - r"""Return a callable for the restore database method over gRPC. - - Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation_id>``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - Returns: - Callable[[~.RestoreDatabaseRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', - request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - spanner_database_admin.ListDatabaseOperationsResponse]: - r"""Return a callable for the list database operations method over gRPC. - - Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects/<project>/instances/<instance>/databases/<database>/operations/<operation>``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - Returns: - Callable[[~.ListDatabaseOperationsRequest], - ~.ListDatabaseOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server.
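An illustrative call follows. The filter string uses the documented ``metadata.@type`` convention but is not taken from this patch; the parent name is hypothetical and ``transport`` is as in the earlier sketches:

    from google.cloud.spanner_admin_database_v1.types import spanner_database_admin

    response = transport.list_database_operations(
        spanner_database_admin.ListDatabaseOperationsRequest(
            parent="projects/my-project/instances/my-instance",
            filter="(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata)",
        )
    )
    for operation in response.operations:
        print(operation.name, operation.done)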
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_operations' not in self._stubs: - self._stubs['list_database_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', - request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, - ) - return self._stubs['list_database_operations'] - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - backup.ListBackupOperationsResponse]: - r"""Return a callable for the list backup operations method over gRPC. - - Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Returns: - Callable[[~.ListBackupOperationsRequest], - ~.ListBackupOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_operations' not in self._stubs: - self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', - request_serializer=backup.ListBackupOperationsRequest.serialize, - response_deserializer=backup.ListBackupOperationsResponse.deserialize, - ) - return self._stubs['list_backup_operations'] - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - spanner_database_admin.ListDatabaseRolesResponse]: - r"""Return a callable for the list database roles method over gRPC. - - Lists Cloud Spanner database roles. - - Returns: - Callable[[~.ListDatabaseRolesRequest], - ~.ListDatabaseRolesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_roles' not in self._stubs: - self._stubs['list_database_roles'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', - request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, - ) - return self._stubs['list_database_roles'] - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - spanner_database_admin.AddSplitPointsResponse]: - r"""Return a callable for the add split points method over gRPC. 
- - Adds split points to specified tables, indexes of a - database. - - Returns: - Callable[[~.AddSplitPointsRequest], - ~.AddSplitPointsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'add_split_points' not in self._stubs: - self._stubs['add_split_points'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints', - request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize, - response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize, - ) - return self._stubs['add_split_points'] - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a new backup schedule. - - Returns: - Callable[[~.CreateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_schedule' not in self._stubs: - self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', - request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['create_backup_schedule'] - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - backup_schedule.BackupSchedule]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets backup schedule for the input schedule name. - - Returns: - Callable[[~.GetBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_schedule' not in self._stubs: - self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', - request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, - response_deserializer=backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['get_backup_schedule'] - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - ~.BackupSchedule]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_backup_schedule' not in self._stubs: - self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', - request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['update_backup_schedule'] - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete backup schedule method over gRPC. - - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_schedule' not in self._stubs: - self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', - request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup_schedule'] - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - backup_schedule.ListBackupSchedulesResponse]: - r"""Return a callable for the list backup schedules method over gRPC. - - Lists all the backup schedules for the database. - - Returns: - Callable[[~.ListBackupSchedulesRequest], - ~.ListBackupSchedulesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_schedules' not in self._stubs: - self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', - request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, - response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs['list_backup_schedules'] - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - spanner_database_admin.InternalUpdateGraphOperationResponse]: - r"""Return a callable for the internal update graph - operation method over gRPC. - - This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - Returns: - Callable[[~.InternalUpdateGraphOperationRequest], - ~.InternalUpdateGraphOperationResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'internal_update_graph_operation' not in self._stubs: - self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation', - request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize, - response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize, - ) - return self._stubs['internal_update_graph_operation'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DatabaseAdminGrpcTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py deleted file mode 100644 index 06cfaa6349..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,1656 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import DatabaseAdminGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DatabaseAdminGrpcAsyncIOTransport(DatabaseAdminTransport): - """gRPC AsyncIO backend transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. 
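Because ``credentials_file`` is deprecated on both ``create_channel`` and the constructor, callers that relied on it can load the key file themselves and pass the resulting object through ``credentials`` instead. A minimal sketch of that migration, assuming a service-account key at a placeholder path:

.. code-block:: python

    import google.auth

    from google.cloud import spanner_admin_database_v1

    # Load the credentials up front instead of passing ``credentials_file``;
    # the second return value is the project inferred from the file, unused here.
    credentials, _ = google.auth.load_credentials_from_file(
        "/path/to/service-account.json"  # placeholder path, not a real file
    )
    client = spanner_admin_database_v1.DatabaseAdminAsyncClient(
        credentials=credentials,
    )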
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - Awaitable[spanner_database_admin.ListDatabasesResponse]]: - r"""Return a callable for the list databases method over gRPC. - - Lists Cloud Spanner databases. - - Returns: - Callable[[~.ListDatabasesRequest], - Awaitable[~.ListDatabasesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases', - request_serializer=spanner_database_admin.ListDatabasesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create database method over gRPC. - - Creates a new Cloud Spanner database and starts to prepare it - for serving. The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format ``/operations/`` and can be - used to track preparation of the database. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.CreateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase', - request_serializer=spanner_database_admin.CreateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_database'] - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - Awaitable[spanner_database_admin.Database]]: - r"""Return a callable for the get database method over gRPC. - - Gets the state of a Cloud Spanner database. - - Returns: - Callable[[~.GetDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase', - request_serializer=spanner_database_admin.GetDatabaseRequest.serialize, - response_deserializer=spanner_database_admin.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update database method over gRPC. - - Updates a Cloud Spanner database. The returned [long-running - operation][google.longrunning.Operation] can be used to track - the progress of updating the database. If the named database - does not exist, returns ``NOT_FOUND``. - - While the operation is pending: - - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field is set to true. - - Cancelling the operation is best-effort. If the cancellation - succeeds, the operation metadata's - [cancel_time][google.spanner.admin.database.v1.UpdateDatabaseMetadata.cancel_time] - is set, the updates are reverted, and the operation terminates - with a ``CANCELLED`` status. - - New UpdateDatabase requests will return a - ``FAILED_PRECONDITION`` error until the pending operation is - done (returns successfully or with error). - - Reading the database via the API continues to give the - pre-request values. - - Upon completion of the returned operation: - - - The new values are in effect and readable via the API. - - The database's - [reconciling][google.spanner.admin.database.v1.Database.reconciling] - field becomes false. - - The returned [long-running - operation][google.longrunning.Operation] will have a name of the - format - ``projects//instances//databases//operations/`` - and can be used to track the database modification. The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseMetadata][google.spanner.admin.database.v1.UpdateDatabaseMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Database][google.spanner.admin.database.v1.Database], if - successful. - - Returns: - Callable[[~.UpdateDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabase', - request_serializer=spanner_database_admin.UpdateDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database'] - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update database ddl method over gRPC. - - Updates the schema of a Cloud Spanner database by - creating/altering/dropping tables, columns, indexes, etc. The - returned [long-running operation][google.longrunning.Operation] - will have a name of the format - ``/operations/`` and can be used to - track execution of the schema change(s). The - [metadata][google.longrunning.Operation.metadata] field type is - [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. - The operation has no response. - - Returns: - Callable[[~.UpdateDatabaseDdlRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database_ddl' not in self._stubs: - self._stubs['update_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl', - request_serializer=spanner_database_admin.UpdateDatabaseDdlRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_database_ddl'] - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the drop database method over gRPC. - - Drops (aka deletes) a Cloud Spanner database. Completed backups - for the database will be retained according to their - ``expire_time``. Note: Cloud Spanner might continue to accept - requests for a few seconds after the database has been deleted. - - Returns: - Callable[[~.DropDatabaseRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'drop_database' not in self._stubs: - self._stubs['drop_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase', - request_serializer=spanner_database_admin.DropDatabaseRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['drop_database'] - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - Awaitable[spanner_database_admin.GetDatabaseDdlResponse]]: - r"""Return a callable for the get database ddl method over gRPC. - - Returns the schema of a Cloud Spanner database as a list of - formatted DDL statements. This method does not show pending - schema updates, those may be queried using the - [Operations][google.longrunning.Operations] API. 
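The DDL surface described above is a long-running operation with no response payload, so awaiting the operation is how a caller learns the statements were applied. A minimal usage sketch against the async client, assuming a ``Singers`` table already exists (table and column names are illustrative):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    async def add_column(database_name: str) -> None:
        # database_name takes the standard form
        # "projects/<project>/instances/<instance>/databases/<database>".
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
        operation = await client.update_database_ddl(
            request={
                "database": database_name,
                "statements": ["ALTER TABLE Singers ADD COLUMN BirthDate DATE"],
            }
        )
        # The schema-change operation has no response body; awaiting result()
        # simply blocks until the DDL statements have been applied.
        await operation.result()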
- - Returns: - Callable[[~.GetDatabaseDdlRequest], - Awaitable[~.GetDatabaseDdlResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database_ddl' not in self._stubs: - self._stubs['get_database_ddl'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl', - request_serializer=spanner_database_admin.GetDatabaseDdlRequest.serialize, - response_deserializer=spanner_database_admin.GetDatabaseDdlResponse.deserialize, - ) - return self._stubs['get_database_ddl'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on a database or backup resource. - Replaces any existing policy. - - Authorization requires ``spanner.databases.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.setIamPolicy`` - permission on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a database or backup - resource. Returns an empty policy if a database or backup exists - but does not have a policy set. - - Authorization requires ``spanner.databases.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. For - backups, authorization requires ``spanner.backups.getIamPolicy`` - permission on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - database or backup resource. - - Attempting this RPC on a non-existent Cloud Spanner database - will result in a NOT_FOUND error if the user has - ``spanner.databases.list`` permission on the containing Cloud - Spanner instance. Otherwise returns an empty set of permissions. - Calling this method on a backup that does not exist will result - in a NOT_FOUND error if the user has ``spanner.backups.list`` - permission on the containing instance. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create backup method over gRPC. - - Starts creating a new Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track creation of the backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - creation and delete the backup. There can be only one pending - backup creation per database. Backup creation of different - databases can run concurrently. - - Returns: - Callable[[~.CreateBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
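Since ``TestIamPermissions`` does not error for missing permissions but simply echoes back the subset the caller actually holds, it is the natural probe for capability checks. A minimal sketch, assuming the caller wants to know whether it may list backups (the resource name format follows the docstrings above):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    async def can_list_backups(resource: str) -> bool:
        # resource is a database or backup name, e.g.
        # "projects/<project>/instances/<instance>/backups/<backup>".
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
        response = await client.test_iam_permissions(
            request={
                "resource": resource,
                "permissions": ["spanner.backups.list"],
            }
        )
        # Only the permissions the caller actually holds are echoed back.
        return "spanner.backups.list" in response.permissions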
- if 'create_backup' not in self._stubs: - self._stubs['create_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackup', - request_serializer=gsad_backup.CreateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup'] - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the copy backup method over gRPC. - - Starts copying a Cloud Spanner Backup. The returned backup - [long-running operation][google.longrunning.Operation] will have - a name of the format - ``projects//instances//backups//operations/`` - and can be used to track copying of the backup. The operation is - associated with the destination backup. The - [metadata][google.longrunning.Operation.metadata] field type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - The [response][google.longrunning.Operation.response] field type - is [Backup][google.spanner.admin.database.v1.Backup], if - successful. Cancelling the returned operation will stop the - copying and delete the destination backup. Concurrent CopyBackup - requests can run on the same source backup. - - Returns: - Callable[[~.CopyBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'copy_backup' not in self._stubs: - self._stubs['copy_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CopyBackup', - request_serializer=backup.CopyBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['copy_backup'] - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - Awaitable[backup.Backup]]: - r"""Return a callable for the get backup method over gRPC. - - Gets metadata on a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.GetBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackup', - request_serializer=backup.GetBackupRequest.serialize, - response_deserializer=backup.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - Awaitable[gsad_backup.Backup]]: - r"""Return a callable for the update backup method over gRPC. - - Updates a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.UpdateBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackup', - request_serializer=gsad_backup.UpdateBackupRequest.serialize, - response_deserializer=gsad_backup.Backup.deserialize, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a pending or completed - [Backup][google.spanner.admin.database.v1.Backup]. - - Returns: - Callable[[~.DeleteBackupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackup', - request_serializer=backup.DeleteBackupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup'] - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - Awaitable[backup.ListBackupsResponse]]: - r"""Return a callable for the list backups method over gRPC. - - Lists completed and pending backups. Backups returned are - ordered by ``create_time`` in descending order, starting from - the most recent ``create_time``. - - Returns: - Callable[[~.ListBackupsRequest], - Awaitable[~.ListBackupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackups', - request_serializer=backup.ListBackupsRequest.serialize, - response_deserializer=backup.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the restore database method over gRPC. - - Create a new database by restoring from a completed backup. The - new database must be in the same project and in an instance with - the same instance configuration as the instance containing the - backup. The returned database [long-running - operation][google.longrunning.Operation] has a name of the - format - ``projects//instances//databases//operations/``, - and can be used to track the progress of the operation, and to - cancel it. The [metadata][google.longrunning.Operation.metadata] - field type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - The [response][google.longrunning.Operation.response] type is - [Database][google.spanner.admin.database.v1.Database], if - successful. Cancelling the returned operation will stop the - restore and delete the database. There can be only one database - being restored into an instance at a time. 
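Restore is likewise a long-running operation whose eventual response is the new ``Database``, so the usual pattern is to start it and await the result. A minimal sketch, with a placeholder ``database_id`` (all names illustrative):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    async def restore_from_backup(instance: str, backup: str) -> None:
        # instance: "projects/<project>/instances/<instance>"
        # backup:   "projects/<project>/instances/<instance>/backups/<backup>"
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
        operation = await client.restore_database(
            request={
                "parent": instance,
                "database_id": "restored-db",  # placeholder id
                "backup": backup,
            }
        )
        # Awaiting the operation yields the restored Database resource.
        database = await operation.result()
        print(f"Restored database: {database.name}")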
Once the restore - operation completes, a new restore operation can be initiated, - without waiting for the optimize operation associated with the - first restore to complete. - - Returns: - Callable[[~.RestoreDatabaseRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_database' not in self._stubs: - self._stubs['restore_database'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/RestoreDatabase', - request_serializer=spanner_database_admin.RestoreDatabaseRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_database'] - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - Awaitable[spanner_database_admin.ListDatabaseOperationsResponse]]: - r"""Return a callable for the list database operations method over gRPC. - - Lists database - [longrunning-operations][google.longrunning.Operation]. A - database operation has a name of the form - ``projects//instances//databases//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. - - Returns: - Callable[[~.ListDatabaseOperationsRequest], - Awaitable[~.ListDatabaseOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_operations' not in self._stubs: - self._stubs['list_database_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseOperations', - request_serializer=spanner_database_admin.ListDatabaseOperationsRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseOperationsResponse.deserialize, - ) - return self._stubs['list_database_operations'] - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - Awaitable[backup.ListBackupOperationsResponse]]: - r"""Return a callable for the list backup operations method over gRPC. - - Lists the backup [long-running - operations][google.longrunning.Operation] in the given instance. - A backup operation has a name of the form - ``projects//instances//backups//operations/``. - The long-running operation - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.progress.start_time`` in descending - order starting from the most recently started operation. - - Returns: - Callable[[~.ListBackupOperationsRequest], - Awaitable[~.ListBackupOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_operations' not in self._stubs: - self._stubs['list_backup_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupOperations', - request_serializer=backup.ListBackupOperationsRequest.serialize, - response_deserializer=backup.ListBackupOperationsResponse.deserialize, - ) - return self._stubs['list_backup_operations'] - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - Awaitable[spanner_database_admin.ListDatabaseRolesResponse]]: - r"""Return a callable for the list database roles method over gRPC. - - Lists Cloud Spanner database roles. - - Returns: - Callable[[~.ListDatabaseRolesRequest], - Awaitable[~.ListDatabaseRolesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_database_roles' not in self._stubs: - self._stubs['list_database_roles'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabaseRoles', - request_serializer=spanner_database_admin.ListDatabaseRolesRequest.serialize, - response_deserializer=spanner_database_admin.ListDatabaseRolesResponse.deserialize, - ) - return self._stubs['list_database_roles'] - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - Awaitable[spanner_database_admin.AddSplitPointsResponse]]: - r"""Return a callable for the add split points method over gRPC. - - Adds split points to specified tables, indexes of a - database. - - Returns: - Callable[[~.AddSplitPointsRequest], - Awaitable[~.AddSplitPointsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'add_split_points' not in self._stubs: - self._stubs['add_split_points'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/AddSplitPoints', - request_serializer=spanner_database_admin.AddSplitPointsRequest.serialize, - response_deserializer=spanner_database_admin.AddSplitPointsResponse.deserialize, - ) - return self._stubs['add_split_points'] - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - Awaitable[gsad_backup_schedule.BackupSchedule]]: - r"""Return a callable for the create backup schedule method over gRPC. - - Creates a new backup schedule. - - Returns: - Callable[[~.CreateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_backup_schedule' not in self._stubs: - self._stubs['create_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/CreateBackupSchedule', - request_serializer=gsad_backup_schedule.CreateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['create_backup_schedule'] - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - Awaitable[backup_schedule.BackupSchedule]]: - r"""Return a callable for the get backup schedule method over gRPC. - - Gets backup schedule for the input schedule name. - - Returns: - Callable[[~.GetBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_schedule' not in self._stubs: - self._stubs['get_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/GetBackupSchedule', - request_serializer=backup_schedule.GetBackupScheduleRequest.serialize, - response_deserializer=backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['get_backup_schedule'] - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - Awaitable[gsad_backup_schedule.BackupSchedule]]: - r"""Return a callable for the update backup schedule method over gRPC. - - Updates a backup schedule. - - Returns: - Callable[[~.UpdateBackupScheduleRequest], - Awaitable[~.BackupSchedule]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup_schedule' not in self._stubs: - self._stubs['update_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/UpdateBackupSchedule', - request_serializer=gsad_backup_schedule.UpdateBackupScheduleRequest.serialize, - response_deserializer=gsad_backup_schedule.BackupSchedule.deserialize, - ) - return self._stubs['update_backup_schedule'] - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete backup schedule method over gRPC. - - Deletes a backup schedule. - - Returns: - Callable[[~.DeleteBackupScheduleRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
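Unlike the backup and restore RPCs, the backup-schedule methods are plain unary calls rather than long-running operations, so usage is a single await. A minimal sketch for fetching one schedule by name (the name is a placeholder in the standard format):

.. code-block:: python

    from google.cloud import spanner_admin_database_v1

    async def show_schedule(schedule_name: str) -> None:
        # schedule_name takes the form "projects/<project>/instances/<instance>
        # /databases/<database>/backupSchedules/<schedule>".
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
        schedule = await client.get_backup_schedule(request={"name": schedule_name})
        # spec carries the schedule definition (e.g. its cron expression).
        print(schedule.name, schedule.spec)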
- if 'delete_backup_schedule' not in self._stubs: - self._stubs['delete_backup_schedule'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/DeleteBackupSchedule', - request_serializer=backup_schedule.DeleteBackupScheduleRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_backup_schedule'] - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - Awaitable[backup_schedule.ListBackupSchedulesResponse]]: - r"""Return a callable for the list backup schedules method over gRPC. - - Lists all the backup schedules for the database. - - Returns: - Callable[[~.ListBackupSchedulesRequest], - Awaitable[~.ListBackupSchedulesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_schedules' not in self._stubs: - self._stubs['list_backup_schedules'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/ListBackupSchedules', - request_serializer=backup_schedule.ListBackupSchedulesRequest.serialize, - response_deserializer=backup_schedule.ListBackupSchedulesResponse.deserialize, - ) - return self._stubs['list_backup_schedules'] - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - Awaitable[spanner_database_admin.InternalUpdateGraphOperationResponse]]: - r"""Return a callable for the internal update graph - operation method over gRPC. - - This is an internal API called by Spanner Graph jobs. - You should never need to call this API directly. - - Returns: - Callable[[~.InternalUpdateGraphOperationRequest], - Awaitable[~.InternalUpdateGraphOperationResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'internal_update_graph_operation' not in self._stubs: - self._stubs['internal_update_graph_operation'] = self._logged_channel.unary_unary( - '/google.spanner.admin.database.v1.DatabaseAdmin/InternalUpdateGraphOperation', - request_serializer=spanner_database_admin.InternalUpdateGraphOperationRequest.serialize, - response_deserializer=spanner_database_admin.InternalUpdateGraphOperationResponse.deserialize, - ) - return self._stubs['internal_update_graph_operation'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_databases: self._wrap_method( - self.list_databases, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_database: self._wrap_method( - self.create_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database: self._wrap_method( - self.get_database, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database: self._wrap_method( - self.update_database, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_database_ddl: self._wrap_method( - self.update_database_ddl, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.drop_database: self._wrap_method( - self.drop_database, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_database_ddl: self._wrap_method( - self.get_database_ddl, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.create_backup: self._wrap_method( - self.create_backup, - 
default_timeout=3600.0, - client_info=client_info, - ), - self.copy_backup: self._wrap_method( - self.copy_backup, - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup: self._wrap_method( - self.get_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup: self._wrap_method( - self.update_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup: self._wrap_method( - self.delete_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backups: self._wrap_method( - self.list_backups, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.restore_database: self._wrap_method( - self.restore_database, - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_operations: self._wrap_method( - self.list_database_operations, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_operations: self._wrap_method( - self.list_backup_operations, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_database_roles: self._wrap_method( - self.list_database_roles, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.add_split_points: self._wrap_method( - self.add_split_points, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_backup_schedule: self._wrap_method( - self.create_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_backup_schedule: self._wrap_method( - 
self.get_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.update_backup_schedule: self._wrap_method( - self.update_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_backup_schedule: self._wrap_method( - self.delete_backup_schedule, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_backup_schedules: self._wrap_method( - self.list_backup_schedules, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.internal_update_graph_operation: self._wrap_method( - self.internal_update_graph_operation, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
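The ``_prep_wrapped_messages`` table above encodes the service defaults: most read paths retry ``DeadlineExceeded`` and ``ServiceUnavailable`` with exponential backoff from 1s to 32s at a 1.3 multiplier, under a 3600s overall deadline. The same policy can also be supplied per call, which is how a caller would widen or narrow it. A minimal sketch mirroring those defaults on ``list_databases``:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async as retries
    from google.cloud import spanner_admin_database_v1

    async def list_all_databases(parent: str) -> None:
        # parent: "projects/<project>/instances/<instance>"
        client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
        # Per-call override mirroring the generated defaults above:
        # exponential backoff from 1s to 32s (x1.3) on transient errors.
        pager = await client.list_databases(
            request={"parent": parent},
            retry=retries.AsyncRetry(
                initial=1.0,
                maximum=32.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=3600.0,
            ),
            timeout=3600.0,
        )
        # The async pager transparently fetches subsequent pages.
        async for database in pager:
            print(database.name)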
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'DatabaseAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py deleted file mode 100644 index 012c64468c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py +++ /dev/null @@ -1,5336 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseDatabaseAdminRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class DatabaseAdminRestInterceptor: - """Interceptor for DatabaseAdmin. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DatabaseAdminRestTransport. - - .. 
code-block:: python - class MyCustomDatabaseAdminInterceptor(DatabaseAdminRestInterceptor): - def pre_add_split_points(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_add_split_points(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_copy_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_copy_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_drop_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database_ddl(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database_ddl(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_internal_update_graph_operation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_internal_update_graph_operation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backup_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backups(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backups(self, response): - 
logging.log(f"Received response: {response}") - return response - - def pre_list_backup_schedules(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_schedules(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_database_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_database_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_database_roles(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_database_roles(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_databases(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_databases(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_restore_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_restore_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_set_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_set_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_test_iam_permissions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_test_iam_permissions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup_schedule(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup_schedule(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database_ddl(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database_ddl(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DatabaseAdminRestTransport(interceptor=MyCustomDatabaseAdminInterceptor()) - client = DatabaseAdminClient(transport=transport) - - - """ - def pre_add_split_points(self, request: spanner_database_admin.AddSplitPointsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for add_split_points - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_add_split_points(self, response: spanner_database_admin.AddSplitPointsResponse) -> spanner_database_admin.AddSplitPointsResponse: - """Post-rpc interceptor for add_split_points - - DEPRECATED. 
Please use the `post_add_split_points_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_add_split_points` interceptor runs - before the `post_add_split_points_with_metadata` interceptor. - """ - return response - - def post_add_split_points_with_metadata(self, response: spanner_database_admin.AddSplitPointsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.AddSplitPointsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for add_split_points - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_add_split_points_with_metadata` - interceptor in new development instead of the `post_add_split_points` interceptor. - When both interceptors are used, this `post_add_split_points_with_metadata` interceptor runs after the - `post_add_split_points` interceptor. The (possibly modified) response returned by - `post_add_split_points` will be passed to - `post_add_split_points_with_metadata`. - """ - return response, metadata - - def pre_copy_backup(self, request: backup.CopyBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.CopyBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for copy_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_copy_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for copy_backup - - DEPRECATED. Please use the `post_copy_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_copy_backup` interceptor runs - before the `post_copy_backup_with_metadata` interceptor. - """ - return response - - def post_copy_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for copy_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_copy_backup_with_metadata` - interceptor in new development instead of the `post_copy_backup` interceptor. - When both interceptors are used, this `post_copy_backup_with_metadata` interceptor runs after the - `post_copy_backup` interceptor. The (possibly modified) response returned by - `post_copy_backup` will be passed to - `post_copy_backup_with_metadata`. - """ - return response, metadata - - def pre_create_backup(self, request: gsad_backup.CreateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.CreateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. 
- """ - return request, metadata - - def post_create_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_backup - - DEPRECATED. Please use the `post_create_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_create_backup` interceptor runs - before the `post_create_backup_with_metadata` interceptor. - """ - return response - - def post_create_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_create_backup_with_metadata` - interceptor in new development instead of the `post_create_backup` interceptor. - When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the - `post_create_backup` interceptor. The (possibly modified) response returned by - `post_create_backup` will be passed to - `post_create_backup_with_metadata`. - """ - return response, metadata - - def pre_create_backup_schedule(self, request: gsad_backup_schedule.CreateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.CreateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_create_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: - """Post-rpc interceptor for create_backup_schedule - - DEPRECATED. Please use the `post_create_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_create_backup_schedule` interceptor runs - before the `post_create_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_create_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_create_backup_schedule_with_metadata` - interceptor in new development instead of the `post_create_backup_schedule` interceptor. - When both interceptors are used, this `post_create_backup_schedule_with_metadata` interceptor runs after the - `post_create_backup_schedule` interceptor. The (possibly modified) response returned by - `post_create_backup_schedule` will be passed to - `post_create_backup_schedule_with_metadata`. 
- """ - return response, metadata - - def pre_create_database(self, request: spanner_database_admin.CreateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_create_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_database - - DEPRECATED. Please use the `post_create_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_create_database` interceptor runs - before the `post_create_database_with_metadata` interceptor. - """ - return response - - def post_create_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_create_database_with_metadata` - interceptor in new development instead of the `post_create_database` interceptor. - When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the - `post_create_database` interceptor. The (possibly modified) response returned by - `post_create_database` will be passed to - `post_create_database_with_metadata`. - """ - return response, metadata - - def pre_delete_backup(self, request: backup.DeleteBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def pre_delete_backup_schedule(self, request: backup_schedule.DeleteBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.DeleteBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def pre_drop_database(self, request: spanner_database_admin.DropDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.DropDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for drop_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def pre_get_backup(self, request: backup.GetBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. 
- """ - return request, metadata - - def post_get_backup(self, response: backup.Backup) -> backup.Backup: - """Post-rpc interceptor for get_backup - - DEPRECATED. Please use the `post_get_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_backup` interceptor runs - before the `post_get_backup_with_metadata` interceptor. - """ - return response - - def post_get_backup_with_metadata(self, response: backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_backup_with_metadata` - interceptor in new development instead of the `post_get_backup` interceptor. - When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the - `post_get_backup` interceptor. The (possibly modified) response returned by - `post_get_backup` will be passed to - `post_get_backup_with_metadata`. - """ - return response, metadata - - def pre_get_backup_schedule(self, request: backup_schedule.GetBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.GetBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_backup_schedule(self, response: backup_schedule.BackupSchedule) -> backup_schedule.BackupSchedule: - """Post-rpc interceptor for get_backup_schedule - - DEPRECATED. Please use the `post_get_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_backup_schedule` interceptor runs - before the `post_get_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_get_backup_schedule_with_metadata(self, response: backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_backup_schedule_with_metadata` - interceptor in new development instead of the `post_get_backup_schedule` interceptor. - When both interceptors are used, this `post_get_backup_schedule_with_metadata` interceptor runs after the - `post_get_backup_schedule` interceptor. The (possibly modified) response returned by - `post_get_backup_schedule` will be passed to - `post_get_backup_schedule_with_metadata`. 
- """ - return response, metadata - - def pre_get_database(self, request: spanner_database_admin.GetDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_database(self, response: spanner_database_admin.Database) -> spanner_database_admin.Database: - """Post-rpc interceptor for get_database - - DEPRECATED. Please use the `post_get_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_database` interceptor runs - before the `post_get_database_with_metadata` interceptor. - """ - return response - - def post_get_database_with_metadata(self, response: spanner_database_admin.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_database_with_metadata` - interceptor in new development instead of the `post_get_database` interceptor. - When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the - `post_get_database` interceptor. The (possibly modified) response returned by - `post_get_database` will be passed to - `post_get_database_with_metadata`. - """ - return response, metadata - - def pre_get_database_ddl(self, request: spanner_database_admin.GetDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_database_ddl - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_database_ddl(self, response: spanner_database_admin.GetDatabaseDdlResponse) -> spanner_database_admin.GetDatabaseDdlResponse: - """Post-rpc interceptor for get_database_ddl - - DEPRECATED. Please use the `post_get_database_ddl_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_database_ddl` interceptor runs - before the `post_get_database_ddl_with_metadata` interceptor. - """ - return response - - def post_get_database_ddl_with_metadata(self, response: spanner_database_admin.GetDatabaseDdlResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.GetDatabaseDdlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_database_ddl - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_database_ddl_with_metadata` - interceptor in new development instead of the `post_get_database_ddl` interceptor. 
- When both interceptors are used, this `post_get_database_ddl_with_metadata` interceptor runs after the - `post_get_database_ddl` interceptor. The (possibly modified) response returned by - `post_get_database_ddl` will be passed to - `post_get_database_ddl_with_metadata`. - """ - return response, metadata - - def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - DEPRECATED. Please use the `post_get_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_get_iam_policy` interceptor runs - before the `post_get_iam_policy_with_metadata` interceptor. - """ - return response - - def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_get_iam_policy_with_metadata` - interceptor in new development instead of the `post_get_iam_policy` interceptor. - When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the - `post_get_iam_policy` interceptor. The (possibly modified) response returned by - `post_get_iam_policy` will be passed to - `post_get_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_list_backup_operations(self, request: backup.ListBackupOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backup_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_backup_operations(self, response: backup.ListBackupOperationsResponse) -> backup.ListBackupOperationsResponse: - """Post-rpc interceptor for list_backup_operations - - DEPRECATED. Please use the `post_list_backup_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_backup_operations` interceptor runs - before the `post_list_backup_operations_with_metadata` interceptor. - """ - return response - - def post_list_backup_operations_with_metadata(self, response: backup.ListBackupOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backup_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. 
- - We recommend only using this `post_list_backup_operations_with_metadata` - interceptor in new development instead of the `post_list_backup_operations` interceptor. - When both interceptors are used, this `post_list_backup_operations_with_metadata` interceptor runs after the - `post_list_backup_operations` interceptor. The (possibly modified) response returned by - `post_list_backup_operations` will be passed to - `post_list_backup_operations_with_metadata`. - """ - return response, metadata - - def pre_list_backups(self, request: backup.ListBackupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backups - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_backups(self, response: backup.ListBackupsResponse) -> backup.ListBackupsResponse: - """Post-rpc interceptor for list_backups - - DEPRECATED. Please use the `post_list_backups_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_backups` interceptor runs - before the `post_list_backups_with_metadata` interceptor. - """ - return response - - def post_list_backups_with_metadata(self, response: backup.ListBackupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backups - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_backups_with_metadata` - interceptor in new development instead of the `post_list_backups` interceptor. - When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the - `post_list_backups` interceptor. The (possibly modified) response returned by - `post_list_backups` will be passed to - `post_list_backups_with_metadata`. - """ - return response, metadata - - def pre_list_backup_schedules(self, request: backup_schedule.ListBackupSchedulesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backup_schedules - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_backup_schedules(self, response: backup_schedule.ListBackupSchedulesResponse) -> backup_schedule.ListBackupSchedulesResponse: - """Post-rpc interceptor for list_backup_schedules - - DEPRECATED. Please use the `post_list_backup_schedules_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_backup_schedules` interceptor runs - before the `post_list_backup_schedules_with_metadata` interceptor. 
- """ - return response - - def post_list_backup_schedules_with_metadata(self, response: backup_schedule.ListBackupSchedulesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backup_schedule.ListBackupSchedulesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backup_schedules - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_backup_schedules_with_metadata` - interceptor in new development instead of the `post_list_backup_schedules` interceptor. - When both interceptors are used, this `post_list_backup_schedules_with_metadata` interceptor runs after the - `post_list_backup_schedules` interceptor. The (possibly modified) response returned by - `post_list_backup_schedules` will be passed to - `post_list_backup_schedules_with_metadata`. - """ - return response, metadata - - def pre_list_database_operations(self, request: spanner_database_admin.ListDatabaseOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_database_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_database_operations(self, response: spanner_database_admin.ListDatabaseOperationsResponse) -> spanner_database_admin.ListDatabaseOperationsResponse: - """Post-rpc interceptor for list_database_operations - - DEPRECATED. Please use the `post_list_database_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_database_operations` interceptor runs - before the `post_list_database_operations_with_metadata` interceptor. - """ - return response - - def post_list_database_operations_with_metadata(self, response: spanner_database_admin.ListDatabaseOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_database_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_database_operations_with_metadata` - interceptor in new development instead of the `post_list_database_operations` interceptor. - When both interceptors are used, this `post_list_database_operations_with_metadata` interceptor runs after the - `post_list_database_operations` interceptor. The (possibly modified) response returned by - `post_list_database_operations` will be passed to - `post_list_database_operations_with_metadata`. - """ - return response, metadata - - def pre_list_database_roles(self, request: spanner_database_admin.ListDatabaseRolesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_database_roles - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. 
- """ - return request, metadata - - def post_list_database_roles(self, response: spanner_database_admin.ListDatabaseRolesResponse) -> spanner_database_admin.ListDatabaseRolesResponse: - """Post-rpc interceptor for list_database_roles - - DEPRECATED. Please use the `post_list_database_roles_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_database_roles` interceptor runs - before the `post_list_database_roles_with_metadata` interceptor. - """ - return response - - def post_list_database_roles_with_metadata(self, response: spanner_database_admin.ListDatabaseRolesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabaseRolesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_database_roles - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_database_roles_with_metadata` - interceptor in new development instead of the `post_list_database_roles` interceptor. - When both interceptors are used, this `post_list_database_roles_with_metadata` interceptor runs after the - `post_list_database_roles` interceptor. The (possibly modified) response returned by - `post_list_database_roles` will be passed to - `post_list_database_roles_with_metadata`. - """ - return response, metadata - - def pre_list_databases(self, request: spanner_database_admin.ListDatabasesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_databases - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_databases(self, response: spanner_database_admin.ListDatabasesResponse) -> spanner_database_admin.ListDatabasesResponse: - """Post-rpc interceptor for list_databases - - DEPRECATED. Please use the `post_list_databases_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_list_databases` interceptor runs - before the `post_list_databases_with_metadata` interceptor. - """ - return response - - def post_list_databases_with_metadata(self, response: spanner_database_admin.ListDatabasesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_databases - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_list_databases_with_metadata` - interceptor in new development instead of the `post_list_databases` interceptor. - When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the - `post_list_databases` interceptor. The (possibly modified) response returned by - `post_list_databases` will be passed to - `post_list_databases_with_metadata`. 
- """ - return response, metadata - - def pre_restore_database(self, request: spanner_database_admin.RestoreDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.RestoreDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for restore_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_restore_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for restore_database - - DEPRECATED. Please use the `post_restore_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_restore_database` interceptor runs - before the `post_restore_database_with_metadata` interceptor. - """ - return response - - def post_restore_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for restore_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_restore_database_with_metadata` - interceptor in new development instead of the `post_restore_database` interceptor. - When both interceptors are used, this `post_restore_database_with_metadata` interceptor runs after the - `post_restore_database` interceptor. The (possibly modified) response returned by - `post_restore_database` will be passed to - `post_restore_database_with_metadata`. - """ - return response, metadata - - def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - DEPRECATED. Please use the `post_set_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_set_iam_policy` interceptor runs - before the `post_set_iam_policy_with_metadata` interceptor. - """ - return response - - def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_set_iam_policy_with_metadata` - interceptor in new development instead of the `post_set_iam_policy` interceptor. - When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the - `post_set_iam_policy` interceptor. 
The (possibly modified) response returned by - `post_set_iam_policy` will be passed to - `post_set_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_test_iam_permissions` interceptor runs - before the `post_test_iam_permissions_with_metadata` interceptor. - """ - return response - - def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_test_iam_permissions_with_metadata` - interceptor in new development instead of the `post_test_iam_permissions` interceptor. - When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the - `post_test_iam_permissions` interceptor. The (possibly modified) response returned by - `post_test_iam_permissions` will be passed to - `post_test_iam_permissions_with_metadata`. - """ - return response, metadata - - def pre_update_backup(self, request: gsad_backup.UpdateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.UpdateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_backup(self, response: gsad_backup.Backup) -> gsad_backup.Backup: - """Post-rpc interceptor for update_backup - - DEPRECATED. Please use the `post_update_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_backup` interceptor runs - before the `post_update_backup_with_metadata` interceptor. - """ - return response - - def post_update_backup_with_metadata(self, response: gsad_backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. 
- - We recommend only using this `post_update_backup_with_metadata` - interceptor in new development instead of the `post_update_backup` interceptor. - When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the - `post_update_backup` interceptor. The (possibly modified) response returned by - `post_update_backup` will be passed to - `post_update_backup_with_metadata`. - """ - return response, metadata - - def pre_update_backup_schedule(self, request: gsad_backup_schedule.UpdateBackupScheduleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.UpdateBackupScheduleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_backup_schedule - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_backup_schedule(self, response: gsad_backup_schedule.BackupSchedule) -> gsad_backup_schedule.BackupSchedule: - """Post-rpc interceptor for update_backup_schedule - - DEPRECATED. Please use the `post_update_backup_schedule_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_backup_schedule` interceptor runs - before the `post_update_backup_schedule_with_metadata` interceptor. - """ - return response - - def post_update_backup_schedule_with_metadata(self, response: gsad_backup_schedule.BackupSchedule, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gsad_backup_schedule.BackupSchedule, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup_schedule - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_update_backup_schedule_with_metadata` - interceptor in new development instead of the `post_update_backup_schedule` interceptor. - When both interceptors are used, this `post_update_backup_schedule_with_metadata` interceptor runs after the - `post_update_backup_schedule` interceptor. The (possibly modified) response returned by - `post_update_backup_schedule` will be passed to - `post_update_backup_schedule_with_metadata`. - """ - return response, metadata - - def pre_update_database(self, request: spanner_database_admin.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_database(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_database - - DEPRECATED. Please use the `post_update_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_database` interceptor runs - before the `post_update_database_with_metadata` interceptor. 
- """ - return response - - def post_update_database_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_update_database_with_metadata` - interceptor in new development instead of the `post_update_database` interceptor. - When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the - `post_update_database` interceptor. The (possibly modified) response returned by - `post_update_database` will be passed to - `post_update_database_with_metadata`. - """ - return response, metadata - - def pre_update_database_ddl(self, request: spanner_database_admin.UpdateDatabaseDdlRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_database_admin.UpdateDatabaseDdlRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_database_ddl - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_update_database_ddl(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_database_ddl - - DEPRECATED. Please use the `post_update_database_ddl_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. This `post_update_database_ddl` interceptor runs - before the `post_update_database_ddl_with_metadata` interceptor. - """ - return response - - def post_update_database_ddl_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_database_ddl - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DatabaseAdmin server but before it is returned to user code. - - We recommend only using this `post_update_database_ddl_with_metadata` - interceptor in new development instead of the `post_update_database_ddl` interceptor. - When both interceptors are used, this `post_update_database_ddl_with_metadata` interceptor runs after the - `post_update_database_ddl` interceptor. The (possibly modified) response returned by - `post_update_database_ddl` will be passed to - `post_update_database_ddl_with_metadata`. - """ - return response, metadata - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DatabaseAdmin server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the DatabaseAdmin server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DatabaseAdminRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DatabaseAdminRestInterceptor - - -class DatabaseAdminRestTransport(_BaseDatabaseAdminRestTransport): - """REST backend synchronous transport for DatabaseAdmin. - - Cloud Spanner Database Admin API - - The Cloud Spanner Database Admin API can be used to: - - - create, drop, and list databases - - update the schema of pre-existing databases - - create, delete, copy and list backups for a database - - restore a database from an existing backup - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DatabaseAdminRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DatabaseAdminRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
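Because `credentials_file` is deprecated in this constructor, new code should hand the transport a credentials object directly; a usage sketch assuming Application Default Credentials are configured (the helper name is hypothetical):

```python
import google.auth
from google.cloud.spanner_admin_database_v1 import DatabaseAdminClient

def build_rest_client() -> DatabaseAdminClient:
    # Pass an explicit Credentials object instead of the deprecated
    # credentials_file argument.
    credentials, _ = google.auth.default()
    transport = DatabaseAdminRestTransport(
        credentials=credentials,
        interceptor=AuditingDatabaseAdminInterceptor(),  # sketch from earlier
    )
    return DatabaseAdminClient(transport=transport)
```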
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _AddSplitPoints(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.AddSplitPoints") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.AddSplitPointsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.AddSplitPointsResponse: - r"""Call the add split points method over HTTP. - - Args: - request (~.spanner_database_admin.AddSplitPointsRequest): - The request object. The request for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.AddSplitPointsResponse: - The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_http_options() - - request, metadata = self._interceptor.pre_add_split_points(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.AddSplitPoints", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "AddSplitPoints", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._AddSplitPoints._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
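The `operations_client` property defined earlier maps the four `google.longrunning` RPCs onto Spanner's operation URIs and caches the resulting client on the instance. A polling sketch; the operation name is a placeholder matching the `projects/*/instances/*/databases/*/operations/*` pattern from that table:

```python
def poll_lro(transport: DatabaseAdminRestTransport, name: str) -> bool:
    # `name` is a placeholder such as
    # "projects/p/instances/i/databases/d/operations/o".
    ops = transport.operations_client  # lazily built, cached on the instance
    operation = ops.get_operation(name=name)
    return operation.done
```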
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.AddSplitPointsResponse() - pb_resp = spanner_database_admin.AddSplitPointsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_add_split_points(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_add_split_points_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.AddSplitPointsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.add_split_points", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "AddSplitPoints", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CopyBackup(_BaseDatabaseAdminRestTransport._BaseCopyBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CopyBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backup.CopyBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the copy backup method over HTTP. - - Args: - request (~.backup.CopyBackupRequest): - The request object. The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
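For the `_AddSplitPoints` handler completed above, a client-level sketch; the field shapes follow the `AddSplitPointsRequest` and `SplitPoints` messages, but the paths are placeholders and the split-key encoding is deliberately elided:

```python
from google.cloud.spanner_admin_database_v1 import types

request = types.AddSplitPointsRequest(
    database="projects/p/instances/i/databases/d",  # placeholder path
    split_points=[types.SplitPoints(table="Singers")],  # key values elided
)
response = build_rest_client().add_split_points(request=request)
```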
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_http_options() - - request, metadata = self._interceptor.pre_copy_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCopyBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CopyBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CopyBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CopyBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_copy_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_copy_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.copy_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CopyBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateBackup(_BaseDatabaseAdminRestTransport._BaseCreateBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CreateBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup.CreateBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the 
create backup method over HTTP. - - Args: - request (~.gsad_backup.CreateBackupRequest): - The request object. The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_http_options() - - request, metadata = self._interceptor.pre_create_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CreateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
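The `status_code >= 400` check that follows this comment delegates to `google.api_core`; the status-to-exception mapping can be exercised in isolation (the fabricated response below is a test fixture, not transport behaviour):

```python
import requests
from google.api_core import exceptions as core_exceptions

fake = requests.Response()
fake.status_code = 404
fake._content = b'{"error": {"message": "Database not found", "status": "NOT_FOUND"}}'
fake.request = requests.Request(
    "POST", "https://spanner.googleapis.com/v1/example"  # placeholder URL
).prepare()

exc = core_exceptions.from_http_response(fake)
assert isinstance(exc, core_exceptions.NotFound)
```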
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CreateBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup_schedule.CreateBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Call the create backup schedule method over HTTP. - - Args: - request (~.gsad_backup_schedule.CreateBackupScheduleRequest): - The request object. The request for - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gsad_backup_schedule.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. 
Next ID: 10 - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_create_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CreateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gsad_backup_schedule.BackupSchedule() - pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_schedule_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gsad_backup_schedule.BackupSchedule.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_backup_schedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDatabase(_BaseDatabaseAdminRestTransport._BaseCreateDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CreateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, 
- request: spanner_database_admin.CreateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create database method over HTTP. - - Args: - request (~.spanner_database_admin.CreateDatabaseRequest): - The request object. The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_create_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CreateDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CreateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
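At the public-client layer the raw `Operation` returned by this handler is wrapped in an `api_core` future; a sketch reusing the hypothetical `build_rest_client()` helper from earlier:

```python
client = build_rest_client()
operation = client.create_database(
    parent="projects/p/instances/i",  # placeholder instance path
    create_statement="CREATE DATABASE `example-db`",
)
database = operation.result(timeout=300)  # block until the LRO resolves
print(database.name)
```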
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.create_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CreateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBackup(_BaseDatabaseAdminRestTransport._BaseDeleteBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DeleteBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.DeleteBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete backup method over HTTP. - - Args: - request (~.backup.DeleteBackupRequest): - The request object. The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
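`DeleteBackup` maps to `google.protobuf.Empty`, which is why this `__call__` declares no return type and the handler parses no payload; at the client layer (using the `client` from the earlier sketch) the call is fire-and-forget:

```python
client.delete_backup(
    name="projects/p/instances/i/backups/b",  # placeholder backup path
)  # returns None; a missing backup surfaces as core_exceptions.NotFound
```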
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DeleteBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DeleteBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteBackupSchedule(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DeleteBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup_schedule.DeleteBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete backup schedule method over HTTP. - - Args: - request (~.backup_schedule.DeleteBackupScheduleRequest): - The request object. The request for - [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DeleteBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DeleteBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DropDatabase(_BaseDatabaseAdminRestTransport._BaseDropDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DropDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.DropDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the drop database method over HTTP. - - Args: - request (~.spanner_database_admin.DropDatabaseRequest): - The request object. The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_http_options() - - request, metadata = self._interceptor.pre_drop_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDropDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DropDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DropDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DropDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetBackup(_BaseDatabaseAdminRestTransport._BaseGetBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.GetBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup.Backup: - r"""Call the get backup method over HTTP. - - Args: - request (~.backup.GetBackupRequest): - The request object. The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup.Backup: - A backup of a Cloud Spanner database. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_http_options() - - request, metadata = self._interceptor.pre_get_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.Backup() - pb_resp = backup.Backup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup.Backup.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackupSchedule(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup_schedule.GetBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup_schedule.BackupSchedule: - r"""Call the get backup schedule method over HTTP. 
- - Args: - request (~.backup_schedule.GetBackupScheduleRequest): - The request object. The request for - [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup_schedule.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_get_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
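Every `_get_response` helper in this transport serializes `query_params` with the same `rest_helpers` call; in isolation it behaves roughly as shown (the parameter values are illustrative):

```python
from google.api_core import rest_helpers

# strict=True rejects non-primitive leaf values and stringifies the rest.
params = rest_helpers.flatten_query_params(
    {"pageSize": 10, "view": "FULL"}, strict=True
)
# e.g. [("pageSize", "10"), ("view", "FULL")]
```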
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup_schedule.BackupSchedule() - pb_resp = backup_schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_schedule_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup_schedule.BackupSchedule.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_backup_schedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDatabase(_BaseDatabaseAdminRestTransport._BaseGetDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.GetDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.Database: - r"""Call the get database method over HTTP. - - Args: - request (~.spanner_database_admin.GetDatabaseRequest): - The request object. The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.Database: - A Cloud Spanner database. 
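The two-step parse used throughout these handlers (allocate the proto-plus wrapper, then `Parse` into its underlying protobuf) fills the wrapper in place; the same pattern in isolation:

```python
from google.protobuf import json_format
from google.cloud.spanner_admin_database_v1.types import spanner_database_admin

resp = spanner_database_admin.Database()
# Database.pb(resp) exposes the wrapped protobuf message, so Parse
# mutates `resp` without a copy.
json_format.Parse(
    '{"name": "projects/p/instances/i/databases/d", "state": "READY"}',
    spanner_database_admin.Database.pb(resp),
    ignore_unknown_fields=True,
)
assert resp.state == spanner_database_admin.Database.State.READY
```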
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_http_options() - - request, metadata = self._interceptor.pre_get_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.Database() - pb_resp = spanner_database_admin.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetDatabaseDdl") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.GetDatabaseDdlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.GetDatabaseDdlResponse: - 
r"""Call the get database ddl method over HTTP. - - Args: - request (~.spanner_database_admin.GetDatabaseDdlRequest): - The request object. The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.GetDatabaseDdlResponse: - The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_http_options() - - request, metadata = self._interceptor.pre_get_database_ddl(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetDatabaseDdl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabaseDdl", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.GetDatabaseDdlResponse() - pb_resp = spanner_database_admin.GetDatabaseDdlResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_database_ddl(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_database_ddl_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.GetDatabaseDdlResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_database_ddl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetDatabaseDdl", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetIamPolicy(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the `IAM - documentation `__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation `__. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetIamPolicy", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
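The JSON and YAML policies quoted above correspond one-to-one with the protobuf types this handler returns; reading bindings back out (with the `client` from the earlier sketch and a placeholder resource) looks like:

```python
from google.iam.v1 import iam_policy_pb2

request = iam_policy_pb2.GetIamPolicyRequest(
    resource="projects/p/instances/i/databases/d",  # placeholder resource
)
policy = client.get_iam_policy(request=request)
for binding in policy.bindings:
    print(binding.role, list(binding.members))
```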
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.get_iam_policy", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _InternalUpdateGraphOperation(_BaseDatabaseAdminRestTransport._BaseInternalUpdateGraphOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.InternalUpdateGraphOperation") - - def __call__(self, - request: spanner_database_admin.InternalUpdateGraphOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.InternalUpdateGraphOperationResponse: - raise NotImplementedError( - "Method InternalUpdateGraphOperation is not available over REST transport" - ) - class _ListBackupOperations(_BaseDatabaseAdminRestTransport._BaseListBackupOperations, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListBackupOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.ListBackupOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup.ListBackupOperationsResponse: - r"""Call the list backup operations method over HTTP. - - Args: - request (~.backup.ListBackupOperationsRequest): - The request object. The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup.ListBackupOperationsResponse: - The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_backup_operations(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupOperations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListBackupOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.ListBackupOperationsResponse() - pb_resp = backup.ListBackupOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup.ListBackupOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_operations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackups(_BaseDatabaseAdminRestTransport._BaseListBackups, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListBackups") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup.ListBackupsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup.ListBackupsResponse: - r"""Call the list backups method over HTTP. - - Args: - request (~.backup.ListBackupsRequest): - The request object. The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup.ListBackupsResponse: - The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListBackups._get_http_options() - - request, metadata = self._interceptor.pre_list_backups(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackups._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListBackups._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackups", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackups", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListBackups._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
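# --- Illustrative sketch, not part of the generated diff: the _ListBackups stub
# above backs DatabaseAdminClient.list_backups, whose pager follows
# next_page_token transparently. The project/instance path is a placeholder.
from google.cloud import spanner_admin_database_v1

client = spanner_admin_database_v1.DatabaseAdminClient(
    transport="rest",  # route calls through this REST transport instead of gRPC
)
for backup in client.list_backups(
    parent="projects/my-project/instances/my-instance"
):
    print(backup.name, backup.state)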
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup.ListBackupsResponse() - pb_resp = backup.ListBackupsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backups(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backups_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup.ListBackupsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backups", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackups", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackupSchedules(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListBackupSchedules") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backup_schedule.ListBackupSchedulesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backup_schedule.ListBackupSchedulesResponse: - r"""Call the list backup schedules method over HTTP. - - Args: - request (~.backup_schedule.ListBackupSchedulesRequest): - The request object. The request for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backup_schedule.ListBackupSchedulesResponse: - The response for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_http_options() - - request, metadata = self._interceptor.pre_list_backup_schedules(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListBackupSchedules", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupSchedules", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListBackupSchedules._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backup_schedule.ListBackupSchedulesResponse() - pb_resp = backup_schedule.ListBackupSchedulesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_schedules(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_schedules_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backup_schedule.ListBackupSchedulesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_backup_schedules", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListBackupSchedules", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabaseOperations(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListDatabaseOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.ListDatabaseOperationsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.ListDatabaseOperationsResponse: - r"""Call the list database operations method over HTTP. - - Args: - request (~.spanner_database_admin.ListDatabaseOperationsRequest): - The request object. The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.ListDatabaseOperationsResponse: - The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_database_operations(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseOperations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListDatabaseOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
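# --- Illustrative sketch, not part of the generated diff: calling the RPC
# implemented above through the public client. The filter uses the documented
# operations-filter syntax to keep only RestoreDatabase operations; resource
# names are placeholders.
from google.cloud import spanner_admin_database_v1

client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
request = spanner_admin_database_v1.ListDatabaseOperationsRequest(
    parent="projects/my-project/instances/my-instance",
    filter=(
        "(metadata.@type:type.googleapis.com/"
        "google.spanner.admin.database.v1.RestoreDatabaseMetadata)"
    ),
)
for operation in client.list_database_operations(request=request):
    # Each item is a google.longrunning Operation message.
    print(operation.name, operation.done)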
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.ListDatabaseOperationsResponse() - pb_resp = spanner_database_admin.ListDatabaseOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_database_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_database_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.ListDatabaseOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_operations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabaseRoles(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListDatabaseRoles") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.ListDatabaseRolesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.ListDatabaseRolesResponse: - r"""Call the list database roles method over HTTP. - - Args: - request (~.spanner_database_admin.ListDatabaseRolesRequest): - The request object. The request for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.ListDatabaseRolesResponse: - The response for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_http_options() - - request, metadata = self._interceptor.pre_list_database_roles(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabaseRoles", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseRoles", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListDatabaseRoles._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.ListDatabaseRolesResponse() - pb_resp = spanner_database_admin.ListDatabaseRolesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_database_roles(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_database_roles_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.ListDatabaseRolesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_database_roles", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabaseRoles", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabases(_BaseDatabaseAdminRestTransport._BaseListDatabases, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListDatabases") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_database_admin.ListDatabasesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, 
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_database_admin.ListDatabasesResponse: - r"""Call the list databases method over HTTP. - - Args: - request (~.spanner_database_admin.ListDatabasesRequest): - The request object. The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_database_admin.ListDatabasesResponse: - The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_http_options() - - request, metadata = self._interceptor.pre_list_databases(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListDatabases._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListDatabases", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabases", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListDatabases._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
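# --- Illustrative sketch, not part of the generated diff: what the
# rest_helpers.flatten_query_params(..., strict=True) call in _get_response
# above does. Nested keys are dotted and repeated fields become repeated
# parameters; the exact scalar-to-string encoding may vary by
# google-api-core version.
from google.api_core import rest_helpers

params = {"pageSize": 10, "options": {"view": "FULL"}, "names": ["a", "b"]}
print(rest_helpers.flatten_query_params(params, strict=True))
# e.g. [('pageSize', '10'), ('options.view', 'FULL'), ('names', 'a'), ('names', 'b')]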
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_database_admin.ListDatabasesResponse() - pb_resp = spanner_database_admin.ListDatabasesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_databases(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_databases_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_database_admin.ListDatabasesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.list_databases", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListDatabases", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RestoreDatabase(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.RestoreDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.RestoreDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the restore database method over HTTP. - - Args: - request (~.spanner_database_admin.RestoreDatabaseRequest): - The request object. The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_http_options() - - request, metadata = self._interceptor.pre_restore_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.RestoreDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "RestoreDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._RestoreDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_restore_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restore_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.restore_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "RestoreDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SetIamPolicy(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the `IAM - documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation <https://cloud.google.com/iam/docs/>`__.
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.SetIamPolicy", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_set_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.set_iam_policy", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TestIamPermissions(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. - - Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.TestIamPermissions", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
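# --- Illustrative sketch, not part of the generated diff: a typical use of the
# TestIamPermissions stub above via the public client. The response echoes back
# only the permissions the caller actually holds; names are placeholders.
from google.cloud import spanner_admin_database_v1

client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
response = client.test_iam_permissions(
    request={
        "resource": "projects/my-project/instances/my-instance/databases/my-db",
        "permissions": ["spanner.databases.read", "spanner.databases.write"],
    }
)
print(list(response.permissions))  # subset of the requested permissions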
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_test_iam_permissions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.test_iam_permissions", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBackup(_BaseDatabaseAdminRestTransport._BaseUpdateBackup, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup.UpdateBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gsad_backup.Backup: - r"""Call the update backup method over HTTP. - - Args: - request (~.gsad_backup.UpdateBackupRequest): - The request object. The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gsad_backup.Backup: - A backup of a Cloud Spanner database. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_http_options() - - request, metadata = self._interceptor.pre_update_backup(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gsad_backup.Backup() - pb_resp = gsad_backup.Backup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gsad_backup.Backup.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBackupSchedule(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateBackupSchedule") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gsad_backup_schedule.UpdateBackupScheduleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gsad_backup_schedule.BackupSchedule: - r"""Call the update backup schedule method over HTTP. - - Args: - request (~.gsad_backup_schedule.UpdateBackupScheduleRequest): - The request object. The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gsad_backup_schedule.BackupSchedule: - BackupSchedule expresses the - automated backup creation specification - for a Spanner database. Next ID: 10 - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_http_options() - - request, metadata = self._interceptor.pre_update_backup_schedule(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateBackupSchedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackupSchedule", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateBackupSchedule._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
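# --- Illustrative sketch, not part of the generated diff: Update* RPCs such as
# the one above take a FieldMask naming exactly the fields to change; all other
# fields are left untouched. Schedule name and cron text are placeholders.
from google.cloud import spanner_admin_database_v1
from google.protobuf import field_mask_pb2

client = spanner_admin_database_v1.DatabaseAdminClient(transport="rest")
schedule = spanner_admin_database_v1.BackupSchedule(
    name="projects/my-project/instances/my-instance/databases/my-db/backupSchedules/daily",
)
schedule.spec.cron_spec.text = "0 2 * * *"  # run daily at 02:00
updated = client.update_backup_schedule(
    backup_schedule=schedule,
    update_mask=field_mask_pb2.FieldMask(paths=["spec.cron_spec.text"]),
)
print(updated.name)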
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gsad_backup_schedule.BackupSchedule() - pb_resp = gsad_backup_schedule.BackupSchedule.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_backup_schedule(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_schedule_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gsad_backup_schedule.BackupSchedule.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_backup_schedule", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateBackupSchedule", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDatabase(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.UpdateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update database method over HTTP. - - Args: - request (~.spanner_database_admin.UpdateDatabaseRequest): - The request object. The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_update_database(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabase", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDatabaseDdl(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.UpdateDatabaseDdl") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_database_admin.UpdateDatabaseDdlRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update database ddl method over HTTP. - - Args: - request (~.spanner_database_admin.UpdateDatabaseDdlRequest): - The request object. Enqueues the given DDL statements to be applied, in - order but not necessarily all at once, to the database - schema at some point (or points) in the future. The - server checks that the statements are executable - (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon - later execution (e.g., if a statement from another batch - of statements is applied first and it conflicts in some - way, or if there is some data-related problem like a - ``NULL`` value in a column to which ``NOT NULL`` would - be added). If a statement fails, all subsequent - statements in the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be - used with the - [Operations][google.longrunning.Operations] API to - monitor progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_http_options() - - request, metadata = self._interceptor.pre_update_database_ddl(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_transcoded_request(http_options, request) - - body = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.UpdateDatabaseDdl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabaseDdl", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._UpdateDatabaseDdl._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
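
The error-translation step that follows relies on google.api_core.exceptions.from_http_response, which converts an HTTP error response into the matching GoogleAPICallError subclass (404 becomes NotFound, 403 becomes PermissionDenied, and so on). A minimal sketch of that mechanism, assuming a requests.Response; the resource name in the URL is hypothetical and exists only to produce an error:

    import requests
    from google.api_core import exceptions as core_exceptions

    # Hypothetical resource; the GET is only meant to produce an error response.
    response = requests.get(
        "https://spanner.googleapis.com/v1/projects/p/instances/i/databases/missing/ddl"
    )
    if response.status_code >= 400:
        # e.g. 404 -> NotFound, 403 -> PermissionDenied, 429 -> TooManyRequests
        raise core_exceptions.from_http_response(response)
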
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_database_ddl(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_database_ddl_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.update_database_ddl", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "UpdateDatabaseDdl", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def add_split_points(self) -> Callable[ - [spanner_database_admin.AddSplitPointsRequest], - spanner_database_admin.AddSplitPointsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AddSplitPoints(self._session, self._host, self._interceptor) # type: ignore - - @property - def copy_backup(self) -> Callable[ - [backup.CopyBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CopyBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_backup(self) -> Callable[ - [gsad_backup.CreateBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.CreateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_database(self) -> Callable[ - [spanner_database_admin.CreateDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup(self) -> Callable[ - [backup.DeleteBackupRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup_schedule(self) -> Callable[ - [backup_schedule.DeleteBackupScheduleRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def drop_database(self) -> Callable[ - [spanner_database_admin.DropDatabaseRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DropDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup(self) -> Callable[ - [backup.GetBackupRequest], - backup.Backup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup_schedule(self) -> Callable[ - [backup_schedule.GetBackupScheduleRequest], - backup_schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database(self) -> Callable[ - [spanner_database_admin.GetDatabaseRequest], - spanner_database_admin.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database_ddl(self) -> Callable[ - [spanner_database_admin.GetDatabaseDdlRequest], - spanner_database_admin.GetDatabaseDdlResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def internal_update_graph_operation(self) -> Callable[ - [spanner_database_admin.InternalUpdateGraphOperationRequest], - spanner_database_admin.InternalUpdateGraphOperationResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._InternalUpdateGraphOperation(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_operations(self) -> Callable[ - [backup.ListBackupOperationsRequest], - backup.ListBackupOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backups(self) -> Callable[ - [backup.ListBackupsRequest], - backup.ListBackupsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_schedules(self) -> Callable[ - [backup_schedule.ListBackupSchedulesRequest], - backup_schedule.ListBackupSchedulesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupSchedules(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_database_operations(self) -> Callable[ - [spanner_database_admin.ListDatabaseOperationsRequest], - spanner_database_admin.ListDatabaseOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabaseOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_database_roles(self) -> Callable[ - [spanner_database_admin.ListDatabaseRolesRequest], - spanner_database_admin.ListDatabaseRolesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabaseRoles(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_databases(self) -> Callable[ - [spanner_database_admin.ListDatabasesRequest], - spanner_database_admin.ListDatabasesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore - - @property - def restore_database(self) -> Callable[ - [spanner_database_admin.RestoreDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestoreDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup(self) -> Callable[ - [gsad_backup.UpdateBackupRequest], - gsad_backup.Backup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup_schedule(self) -> Callable[ - [gsad_backup_schedule.UpdateBackupScheduleRequest], - gsad_backup_schedule.BackupSchedule]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateBackupSchedule(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database_ddl(self) -> Callable[ - [spanner_database_admin.UpdateDatabaseDdlRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabaseDdl(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseDatabaseAdminRestTransport._BaseCancelOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.CancelOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseDatabaseAdminRestTransport._BaseDeleteOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.DeleteOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseDatabaseAdminRestTransport._BaseGetOperation, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.database_v1.DatabaseAdminAsyncClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseDatabaseAdminRestTransport._BaseListOperations, DatabaseAdminRestStub): - def __hash__(self): - return hash("DatabaseAdminRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. 
- - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseDatabaseAdminRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseDatabaseAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDatabaseAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.database_v1.DatabaseAdminClient.ListOperations", - extra = { - "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DatabaseAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
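
The transcoded_request assembled above comes from google.api_core.path_template.transcode, which matches the request against the configured http_options URI templates and splits it into method, uri, and query_params (plus body, where the rule declares one). A minimal sketch of that step in isolation; the HTTP rule and resource name below are illustrative, not taken from this file:

    from google.api_core import path_template

    # Illustrative HTTP rule, in the same shape as the _get_http_options()
    # entries defined in rest_base.py below.
    http_options = [{
        'method': 'get',
        'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}',
    }]

    # transcode() selects the first rule whose template matches the request
    # fields and expands the URI; unmatched fields become query parameters.
    transcoded = path_template.transcode(
        http_options,
        name='projects/demo/instances/test/databases/db/operations/op-123',
    )
    assert transcoded['method'] == 'get'
    assert transcoded['uri'] == (
        '/v1/projects/demo/instances/test/databases/db/operations/op-123')
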
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = operations_pb2.ListOperationsResponse()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_list_operations(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.spanner.admin.database_v1.DatabaseAdminClient.ListOperations",
-                    extra = {
-                        "serviceName": "google.spanner.admin.database.v1.DatabaseAdmin",
-                        "rpcName": "ListOperations",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def kind(self) -> str:
-        return "rest"
-
-    def close(self):
-        self._session.close()
-
-
-__all__=(
-    'DatabaseAdminRestTransport',
-)
diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py
deleted file mode 100644
index 0cfaa08199..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest_base.py
+++ /dev/null
@@ -1,1378 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json  # type: ignore
-from google.api_core import path_template
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from .base import DatabaseAdminTransport, DEFAULT_CLIENT_INFO
-
-import re
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-
-
-from google.cloud.spanner_admin_database_v1.types import backup
-from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
-from google.cloud.spanner_admin_database_v1.types import backup_schedule
-from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule
-from google.cloud.spanner_admin_database_v1.types import spanner_database_admin
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-
-
-class _BaseDatabaseAdminRestTransport(DatabaseAdminTransport):
-    """Base REST backend transport for DatabaseAdmin.
-
-    Note: This class is not meant to be used directly. Use its sync and
-    async sub-classes instead.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-    """
-
-    def __init__(self, *,
-            host: str = 'spanner.googleapis.com',
-            credentials: Optional[Any] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'spanner.googleapis.com').
-            credentials (Optional[Any]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-
-    class _BaseAddSplitPoints:
-        def __hash__(self):  # pragma: NO COVER
-            return NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints',
-                'body': '*',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = spanner_database_admin.AddSplitPointsRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_request_body_json(transcoded_request):
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=True
-            )
-            return body
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BaseDatabaseAdminRestTransport._BaseAddSplitPoints._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-    class _BaseCopyBackup:
-        def __hash__(self):  # pragma: NO COVER
-            return NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/backups:copy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.CopyBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCopyBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "backupId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/backups', - 'body': 'backup', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gsad_backup.CreateBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "backupScheduleId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', - 'body': 'backup_schedule', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gsad_backup_schedule.CreateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - 
transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/databases', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.CreateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseCreateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.DeleteBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': 
'/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup_schedule.DeleteBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseDeleteBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDropDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.DropDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseDropDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.GetBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', 
- 'uri': '/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup_schedule.GetBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.GetDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDatabaseDdl: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.GetDatabaseDdlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetDatabaseDdl._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, 
str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:getIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:getIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseInternalUpdateGraphOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseListBackupOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/backupOperations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.ListBackupOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackups: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/backups', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup.ListBackupsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
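
The update() call just below back-fills required fields that the serialized request left unset: _get_unset_required_fields compares the query-param dict against the class's __REQUIRED_FIELDS_DEFAULT_VALUES (empty here for ListBackups, but _BaseCreateBackup above defaults "backupId" to ""). A small standalone sketch of the same mechanism, with names simplified:

    from typing import Any, Dict

    # Same shape as _BaseCreateBackup.__REQUIRED_FIELDS_DEFAULT_VALUES above.
    REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {"backupId": ""}

    def get_unset_required_fields(message_dict: Dict[str, Any]) -> Dict[str, Any]:
        # Keep only the required fields the serialized request did not set.
        return {k: v for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict}

    query_params = {"parent": "projects/demo/instances/test"}
    query_params.update(get_unset_required_fields(query_params))
    assert query_params == {"parent": "projects/demo/instances/test", "backupId": ""}
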
query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackups._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackupSchedules: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backup_schedule.ListBackupSchedulesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListBackupSchedules._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabaseOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/databaseOperations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.ListDatabaseOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabaseRoles: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.ListDatabaseRolesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabaseRoles._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabases: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/databases', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.ListDatabasesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseListDatabases._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRestoreDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/databases:restore', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.RestoreDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseRestoreDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': 
'/v1/{resource=projects/*/instances/*/backups/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/backups/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/backupSchedules/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*/databases/*/databaseRoles/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{backup.name=projects/*/instances/*/backups/*}', - 'body': 'backup', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = 
gsad_backup.UpdateBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBackupSchedule: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}', - 'body': 'backup_schedule', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateBackupSchedule._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{database.name=projects/*/instances/*/databases/*}', - 'body': 'database', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.UpdateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDatabaseDdl: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{database=projects/*/instances/*/databases/*}/ddl', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_database_admin.UpdateDatabaseDdlRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDatabaseAdminRestTransport._BaseUpdateDatabaseDdl._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - 
@staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseDatabaseAdminRestTransport', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py deleted file mode 100644 index 84cb902d6c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/__init__.py +++ /dev/null @@ -1,148 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
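For illustration, the `_Base*` transport helpers above all follow the same pipeline: declare HTTP options, transcode the request message onto a URI, then derive JSON query parameters from the leftover fields. A minimal sketch of that flow, assuming a hypothetical project and instance name:

# Sketch of the _Base* helper flow: transcode a request and build query params.
# Resource names below are hypothetical placeholders.
import json

from google.api_core import path_template
from google.protobuf import json_format

from google.cloud.spanner_admin_database_v1.types import spanner_database_admin

request = spanner_database_admin.ListDatabasesRequest(
    parent="projects/my-project/instances/my-instance",
    page_size=10,
)
http_options = [{
    'method': 'get',
    'uri': '/v1/{parent=projects/*/instances/*}/databases',
}]
transcoded = path_template.transcode(
    http_options, spanner_database_admin.ListDatabasesRequest.pb(request))
# transcoded['uri'] -> '/v1/projects/my-project/instances/my-instance/databases'
query_params = json.loads(json_format.MessageToJson(
    transcoded['query_params'],
    use_integers_for_enums=True,
))
query_params["$alt"] = "json;enum-encoding=int"
# query_params -> {'pageSize': 10, '$alt': 'json;enum-encoding=int'}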
-# -from .backup import ( - Backup, - BackupInfo, - BackupInstancePartition, - CopyBackupEncryptionConfig, - CopyBackupMetadata, - CopyBackupRequest, - CreateBackupEncryptionConfig, - CreateBackupMetadata, - CreateBackupRequest, - DeleteBackupRequest, - FullBackupSpec, - GetBackupRequest, - IncrementalBackupSpec, - ListBackupOperationsRequest, - ListBackupOperationsResponse, - ListBackupsRequest, - ListBackupsResponse, - UpdateBackupRequest, -) -from .backup_schedule import ( - BackupSchedule, - BackupScheduleSpec, - CreateBackupScheduleRequest, - CrontabSpec, - DeleteBackupScheduleRequest, - GetBackupScheduleRequest, - ListBackupSchedulesRequest, - ListBackupSchedulesResponse, - UpdateBackupScheduleRequest, -) -from .common import ( - EncryptionConfig, - EncryptionInfo, - OperationProgress, - DatabaseDialect, -) -from .spanner_database_admin import ( - AddSplitPointsRequest, - AddSplitPointsResponse, - CreateDatabaseMetadata, - CreateDatabaseRequest, - Database, - DatabaseRole, - DdlStatementActionInfo, - DropDatabaseRequest, - GetDatabaseDdlRequest, - GetDatabaseDdlResponse, - GetDatabaseRequest, - InternalUpdateGraphOperationRequest, - InternalUpdateGraphOperationResponse, - ListDatabaseOperationsRequest, - ListDatabaseOperationsResponse, - ListDatabaseRolesRequest, - ListDatabaseRolesResponse, - ListDatabasesRequest, - ListDatabasesResponse, - OptimizeRestoredDatabaseMetadata, - RestoreDatabaseEncryptionConfig, - RestoreDatabaseMetadata, - RestoreDatabaseRequest, - RestoreInfo, - SplitPoints, - UpdateDatabaseDdlMetadata, - UpdateDatabaseDdlRequest, - UpdateDatabaseMetadata, - UpdateDatabaseRequest, - RestoreSourceType, -) - -__all__ = ( - 'Backup', - 'BackupInfo', - 'BackupInstancePartition', - 'CopyBackupEncryptionConfig', - 'CopyBackupMetadata', - 'CopyBackupRequest', - 'CreateBackupEncryptionConfig', - 'CreateBackupMetadata', - 'CreateBackupRequest', - 'DeleteBackupRequest', - 'FullBackupSpec', - 'GetBackupRequest', - 'IncrementalBackupSpec', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'UpdateBackupRequest', - 'BackupSchedule', - 'BackupScheduleSpec', - 'CreateBackupScheduleRequest', - 'CrontabSpec', - 'DeleteBackupScheduleRequest', - 'GetBackupScheduleRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'UpdateBackupScheduleRequest', - 'EncryptionConfig', - 'EncryptionInfo', - 'OperationProgress', - 'DatabaseDialect', - 'AddSplitPointsRequest', - 'AddSplitPointsResponse', - 'CreateDatabaseMetadata', - 'CreateDatabaseRequest', - 'Database', - 'DatabaseRole', - 'DdlStatementActionInfo', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'GetDatabaseRequest', - 'InternalUpdateGraphOperationRequest', - 'InternalUpdateGraphOperationResponse', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'ListDatabaseRolesRequest', - 'ListDatabaseRolesResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'OptimizeRestoredDatabaseMetadata', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'RestoreDatabaseRequest', - 'RestoreInfo', - 'SplitPoints', - 'UpdateDatabaseDdlMetadata', - 'UpdateDatabaseDdlRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseRequest', - 'RestoreSourceType', -) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py deleted file mode 100644 
index d89cfa44b5..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup.py +++ /dev/null @@ -1,1097 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import common -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'Backup', - 'CreateBackupRequest', - 'CreateBackupMetadata', - 'CopyBackupRequest', - 'CopyBackupMetadata', - 'UpdateBackupRequest', - 'GetBackupRequest', - 'DeleteBackupRequest', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'ListBackupOperationsRequest', - 'ListBackupOperationsResponse', - 'BackupInfo', - 'CreateBackupEncryptionConfig', - 'CopyBackupEncryptionConfig', - 'FullBackupSpec', - 'IncrementalBackupSpec', - 'BackupInstancePartition', - }, -) - - -class Backup(proto.Message): - r"""A backup of a Cloud Spanner database. - - Attributes: - database (str): - Required for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. Name of the database from which this backup was - created. This needs to be in the same instance as the - backup. Values are of the form - ``projects//instances//databases/``. - version_time (google.protobuf.timestamp_pb2.Timestamp): - The backup will contain an externally consistent copy of the - database at the timestamp specified by ``version_time``. If - ``version_time`` is not specified, the system will set - ``version_time`` to the ``create_time`` of the backup. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Required for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. The expiration time of the backup, with - microseconds granularity that must be at least 6 hours and - at most 366 days from the time the CreateBackup request is - processed. Once the ``expire_time`` has passed, the backup - is eligible to be automatically deleted by Cloud Spanner to - free the resources used by the backup. - name (str): - Output only for the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. Required for the - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup] - operation. - - A globally unique identifier for the backup which cannot be - changed. Values are of the form - ``projects//instances//backups/[a-z][a-z0-9_\-]*[a-z0-9]`` - The final segment of the name must be between 2 and 60 - characters in length. 
- - The backup is stored in the location(s) specified in the - instance configuration of the instance containing the - backup, identified by the prefix of the backup name of the - form ``projects//instances/``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request is received. If the request does not specify - ``version_time``, the ``version_time`` of the backup will be - equivalent to the ``create_time``. - size_bytes (int): - Output only. Size of the backup in bytes. - freeable_size_bytes (int): - Output only. The number of bytes that will be - freed by deleting this backup. This value will - be zero if, for example, this backup is part of - an incremental backup chain and younger backups - in the chain require that we keep its data. For - backups not in an incremental backup chain, this - is always the size of the backup. This value may - change if backups on the same chain get created, - deleted or expired. - exclusive_size_bytes (int): - Output only. For a backup in an incremental - backup chain, this is the storage space needed - to keep the data that has changed since the - previous backup. For all other backups, this is - always the size of the backup. This value may - change if backups on the same chain get deleted - or expired. - - This field can be used to calculate the total - storage space used by a set of backups. For - example, the total space used by all backups of - a database can be computed by summing up this - field. - state (google.cloud.spanner_admin_database_v1.types.Backup.State): - Output only. The current state of the backup. - referencing_databases (MutableSequence[str]): - Output only. The names of the restored databases that - reference the backup. The database names are of the form - ``projects//instances//databases/``. - Referencing databases may exist in different instances. The - existence of any referencing database prevents the backup - from being deleted. When a restored database from the backup - enters the ``READY`` state, the reference to the backup is - removed. - encryption_info (google.cloud.spanner_admin_database_v1.types.EncryptionInfo): - Output only. The encryption information for - the backup. - encryption_information (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): - Output only. The encryption information for the backup, - whether it is protected by one or more KMS keys. The - information includes all Cloud KMS key versions used to - encrypt the backup. The ``encryption_status`` field inside of - each ``EncryptionInfo`` - is not populated. At least one of the key versions must be - available for the backup to be restored. If a key version is - revoked in the middle of a restore, the restore behavior is - undefined. - database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): - Output only. The database dialect information - for the backup. - referencing_backups (MutableSequence[str]): - Output only. The names of the destination backups being - created by copying this source backup. The backup names are - of the form - ``projects//instances//backups/``. - Referencing backups may exist in different instances. The - existence of any referencing backup prevents the backup from - being deleted. When the copy operation is done (either - successfully completed or cancelled or the destination - backup is deleted), the reference to the backup is removed.
- max_expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The max allowed expiration time of the backup, - with microseconds granularity. A backup's expiration time - can be configured in multiple APIs: CreateBackup, - UpdateBackup, CopyBackup. When updating or copying an - existing backup, the expiration time specified must be less - than ``Backup.max_expire_time``. - backup_schedules (MutableSequence[str]): - Output only. List of backup schedule URIs - that are associated with creating this backup. - This is only applicable for scheduled backups, - and is empty for on-demand backups. - - To optimize for storage, whenever possible, - multiple schedules are collapsed together to - create one backup. In such cases, this field - captures the list of all backup schedule URIs - that are associated with creating this backup. - If collapsing is not done, then this field - captures the single backup schedule URI - associated with creating this backup. - incremental_backup_chain_id (str): - Output only. Populated only for backups in an incremental - backup chain. Backups share the same chain id if and only if - they belong to the same incremental backup chain. Use this - field to determine which backups are part of the same - incremental backup chain. The ordering of backups in the - chain can be determined by ordering the backup - ``version_time``. - oldest_version_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Data deleted at a time older - than this is guaranteed not to be retained in - order to support this backup. For a backup in an - incremental backup chain, this is the version - time of the oldest backup that exists or ever - existed in the chain. For all other backups, - this is the version time of the backup. This - field can be used to understand what data is - being retained by the backup system. - instance_partitions (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupInstancePartition]): - Output only. The instance partition(s) storing the backup. - - This is the same as the list of the instance partition(s) - that the database had footprint in at the backup's - ``version_time``. - """ - class State(proto.Enum): - r"""Indicates the current state of the backup. - - Values: - STATE_UNSPECIFIED (0): - Not specified. - CREATING (1): - The pending backup is still being created. Operations on the - backup may fail with ``FAILED_PRECONDITION`` in this state. - READY (2): - The backup is complete and ready for use. 
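As a usage illustration of the ``state`` field described above, a minimal sketch that checks whether a backup has left the ``CREATING`` state (backup name hypothetical):

# Sketch: inspecting Backup.State after fetching a backup.
# The resource name is a hypothetical placeholder.
from google.cloud import spanner_admin_database_v1

client = spanner_admin_database_v1.DatabaseAdminClient()
backup = client.get_backup(
    name="projects/my-project/instances/my-instance/backups/my-backup")
if backup.state == spanner_admin_database_v1.Backup.State.READY:
    # The backup is complete and safe to restore from or copy.
    print("backup ready:", backup.name)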
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - database: str = proto.Field( - proto.STRING, - number=2, - ) - version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - size_bytes: int = proto.Field( - proto.INT64, - number=5, - ) - freeable_size_bytes: int = proto.Field( - proto.INT64, - number=15, - ) - exclusive_size_bytes: int = proto.Field( - proto.INT64, - number=16, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - referencing_databases: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - encryption_info: common.EncryptionInfo = proto.Field( - proto.MESSAGE, - number=8, - message=common.EncryptionInfo, - ) - encryption_information: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( - proto.MESSAGE, - number=13, - message=common.EncryptionInfo, - ) - database_dialect: common.DatabaseDialect = proto.Field( - proto.ENUM, - number=10, - enum=common.DatabaseDialect, - ) - referencing_backups: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=11, - ) - max_expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - backup_schedules: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=14, - ) - incremental_backup_chain_id: str = proto.Field( - proto.STRING, - number=17, - ) - oldest_version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=18, - message=timestamp_pb2.Timestamp, - ) - instance_partitions: MutableSequence['BackupInstancePartition'] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message='BackupInstancePartition', - ) - - -class CreateBackupRequest(proto.Message): - r"""The request for - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - - Attributes: - parent (str): - Required. The name of the instance in which the backup will - be created. This must be the same instance that contains the - database the backup will be created from. The backup will be - stored in the location(s) specified in the instance - configuration of this instance. Values are of the form - ``projects//instances/``. - backup_id (str): - Required. The id of the backup to be created. The - ``backup_id`` appended to ``parent`` forms the full backup - name of the form - ``projects//instances//backups/``. - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to create. - encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): - Optional. The encryption configuration used to encrypt the - backup. If this field is not specified, the backup will use - the same encryption configuration as the database by - default, namely - [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] - = ``USE_DATABASE_ENCRYPTION``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup: 'Backup' = proto.Field( - proto.MESSAGE, - number=3, - message='Backup', - ) - encryption_config: 'CreateBackupEncryptionConfig' = proto.Field( - proto.MESSAGE, - number=4, - message='CreateBackupEncryptionConfig', - ) - - -class CreateBackupMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup]. - - Attributes: - name (str): - The name of the backup being created. - database (str): - The name of the database the backup is - created from. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of this operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - database: str = proto.Field( - proto.STRING, - number=2, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=3, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class CopyBackupRequest(proto.Message): - r"""The request for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - - Attributes: - parent (str): - Required. The name of the destination instance that will - contain the backup copy. Values are of the form: - ``projects//instances/``. - backup_id (str): - Required. The id of the backup copy. The ``backup_id`` - appended to ``parent`` forms the full backup_uri of the form - ``projects//instances//backups/``. - source_backup (str): - Required. The source backup to be copied. The source backup - needs to be in READY state for it to be copied. Once - CopyBackup is in progress, the source backup cannot be - deleted or cleaned up on expiration until CopyBackup is - finished. Values are of the form: - ``projects//instances//backups/``. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The expiration time of the backup in microsecond - granularity. The expiration time must be at least 6 hours - and at most 366 days from the ``create_time`` of the source - backup. Once the ``expire_time`` has passed, the backup is - eligible to be automatically deleted by Cloud Spanner to - free the resources used by the backup. - encryption_config (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig): - Optional. The encryption configuration used to encrypt the - backup. 
If this field is not specified, the backup will use - the same encryption configuration as the source backup by - default, namely - [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] - = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_id: str = proto.Field( - proto.STRING, - number=2, - ) - source_backup: str = proto.Field( - proto.STRING, - number=3, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - encryption_config: 'CopyBackupEncryptionConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='CopyBackupEncryptionConfig', - ) - - -class CopyBackupMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup]. - - Attributes: - name (str): - The name of the backup being created through the copy - operation. Values are of the form - ``projects//instances//backups/``. - source_backup (str): - The name of the source backup that is being copied. Values - are of the form - ``projects//instances//backups/``. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of CopyBackup operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - source_backup: str = proto.Field( - proto.STRING, - number=2, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=3, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateBackupRequest(proto.Message): - r"""The request for - [UpdateBackup][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup]. - - Attributes: - backup (google.cloud.spanner_admin_database_v1.types.Backup): - Required. The backup to update. ``backup.name``, and the - fields to be updated as specified by ``update_mask`` are - required. Other fields are ignored. Update is only supported - for the following fields: - - - ``backup.expire_time``. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields (e.g. - ``expire_time``) in the Backup resource should be updated. - This mask is relative to the Backup resource, not to the - request message. The field mask must always be specified; - this prevents any future fields from being erased - accidentally by clients that do not know about them. 
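A minimal sketch of the update-mask pattern just described, assuming a hypothetical backup name:

# Sketch: extending a backup's expire_time via UpdateBackup's field mask.
import datetime

from google.cloud import spanner_admin_database_v1
from google.protobuf import field_mask_pb2

new_expire_time = (datetime.datetime.now(datetime.timezone.utc)
                   + datetime.timedelta(days=30))
request = spanner_admin_database_v1.UpdateBackupRequest(
    backup=spanner_admin_database_v1.Backup(
        name="projects/my-project/instances/my-instance/backups/my-backup",
        expire_time=new_expire_time,
    ),
    # Only fields named in the mask are updated; other Backup fields are ignored.
    update_mask=field_mask_pb2.FieldMask(paths=["expire_time"]),
)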
- """ - - backup: 'Backup' = proto.Field( - proto.MESSAGE, - number=1, - message='Backup', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetBackupRequest(proto.Message): - r"""The request for - [GetBackup][google.spanner.admin.database.v1.DatabaseAdmin.GetBackup]. - - Attributes: - name (str): - Required. Name of the backup. Values are of the form - ``projects//instances//backups/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBackupRequest(proto.Message): - r"""The request for - [DeleteBackup][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup]. - - Attributes: - name (str): - Required. Name of the backup to delete. Values are of the - form - ``projects//instances//backups/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupsRequest(proto.Message): - r"""The request for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Attributes: - parent (str): - Required. The instance to list backups from. Values are of - the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned backups. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [Backup][google.spanner.admin.database.v1.Backup] are - eligible for filtering: - - - ``name`` - - ``database`` - - ``state`` - - ``create_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``expire_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``version_time`` (and values are of the format - YYYY-MM-DDTHH:MM:SSZ) - - ``size_bytes`` - - ``backup_schedules`` - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic, but you can specify AND, OR, and - NOT logic explicitly. - - Here are a few examples: - - - ``name:Howl`` - The backup's name contains the string - "howl". - - ``database:prod`` - The database's name contains the - string "prod". - - ``state:CREATING`` - The backup is pending creation. - - ``state:READY`` - The backup is fully created and ready - for use. - - ``(name:howl) AND (create_time < \"2018-03-28T14:50:00Z\")`` - - The backup name contains the string "howl" and - ``create_time`` of the backup is before - 2018-03-28T14:50:00Z. - - ``expire_time < \"2018-03-28T14:50:00Z\"`` - The backup - ``expire_time`` is before 2018-03-28T14:50:00Z. - - ``size_bytes > 10000000000`` - The backup's size is - greater than 10GB - - ``backup_schedules:daily`` - The backup is created from a - schedule with "daily" in its name. - page_size (int): - Number of backups to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupsResponse.next_page_token] - from a previous - [ListBackupsResponse][google.spanner.admin.database.v1.ListBackupsResponse] - to the same ``parent`` and with the same ``filter``. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupsResponse(proto.Message): - r"""The response for - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups]. - - Attributes: - backups (MutableSequence[google.cloud.spanner_admin_database_v1.types.Backup]): - The list of matching backups. Backups returned are ordered - by ``create_time`` in descending order, starting from the - most recent ``create_time``. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackups][google.spanner.admin.database.v1.DatabaseAdmin.ListBackups] - call to fetch more of the matching backups. - """ - - @property - def raw_page(self): - return self - - backups: MutableSequence['Backup'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Backup', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListBackupOperationsRequest(proto.Message): - r"""The request for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Attributes: - parent (str): - Required. The instance of the backup operations. Values are - of the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned backup - operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [operation][google.longrunning.Operation] are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] - is - ``type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first if filtering on - metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic, but you can specify AND, OR, and - NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` - ``metadata.database:prod`` - Returns operations where: - - - The operation's metadata type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - - The source database name of backup contains the string - "prod". 
- - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` - ``(metadata.name:howl) AND`` - ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Returns operations where: - - - The operation's metadata type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata]. - - The backup name contains the string "howl". - - The operation started before 2018-03-28T14:50:00Z. - - The operation resulted in an error. - - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` - ``(metadata.source_backup:test) AND`` - ``(metadata.progress.start_time < \"2022-01-18T14:50:00Z\") AND`` - ``(error:*)`` - Returns operations where: - - - The operation's metadata type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata]. - - The source backup name contains the string "test". - - The operation started before 2022-01-18T14:50:00Z. - - The operation resulted in an error. - - - ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata) AND`` - ``(metadata.database:test_db)) OR`` - ``((metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.CopyBackupMetadata) AND`` - ``(metadata.source_backup:test_bkp)) AND`` - ``(error:*)`` - Returns operations where: - - - The operation's metadata matches either of criteria: - - - The operation's metadata type is - [CreateBackupMetadata][google.spanner.admin.database.v1.CreateBackupMetadata] - AND the source database name of the backup contains - the string "test_db" - - The operation's metadata type is - [CopyBackupMetadata][google.spanner.admin.database.v1.CopyBackupMetadata] - AND the source backup name contains the string - "test_bkp" - - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupOperationsResponse.next_page_token] - from a previous - [ListBackupOperationsResponse][google.spanner.admin.database.v1.ListBackupOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupOperationsResponse(proto.Message): - r"""The response for - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations]. - - Attributes: - operations (MutableSequence[google.longrunning.operations_pb2.Operation]): - The list of matching backup [long-running - operations][google.longrunning.Operation]. Each operation's - name will be prefixed by the backup's name. The operation's - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that are pending or have - completed/failed/canceled within the last 7 days. Operations - returned are ordered by - ``operation.metadata.value.progress.start_time`` in - descending order starting from the most recently started - operation. 
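A sketch of one such metadata filter in request form, following the examples above (parent name hypothetical):

# Sketch: filtering backup operations by their metadata type.
from google.cloud import spanner_admin_database_v1

_CREATE_BACKUP_METADATA = (
    "type.googleapis.com/google.spanner.admin.database.v1.CreateBackupMetadata"
)
request = spanner_admin_database_v1.ListBackupOperationsRequest(
    parent="projects/my-project/instances/my-instance",
    filter=(f"(metadata.@type={_CREATE_BACKUP_METADATA}) AND "
            "(metadata.database:prod)"),
)
client = spanner_admin_database_v1.DatabaseAdminClient()
for operation in client.list_backup_operations(request=request):
    print(operation.name, operation.done)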
- next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackupOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class BackupInfo(proto.Message): - r"""Information about a backup. - - Attributes: - backup (str): - Name of the backup. - version_time (google.protobuf.timestamp_pb2.Timestamp): - The backup contains an externally consistent copy of - ``source_database`` at the timestamp specified by - ``version_time``. If the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request did not specify ``version_time``, the - ``version_time`` of the backup is equivalent to the - ``create_time``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the - [CreateBackup][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup] - request was received. - source_database (str): - Name of the database the backup was created - from. - """ - - backup: str = proto.Field( - proto.STRING, - number=1, - ) - version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - source_database: str = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateBackupEncryptionConfig(proto.Message): - r"""Encryption configuration for the backup to create. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig.EncryptionType): - Required. The encryption type of the backup. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to protect the - backup. This field should be set only when - [encryption_type][google.spanner.admin.database.v1.CreateBackupEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - kms_key_names (MutableSequence[str]): - Optional. Specifies the KMS configuration for the one or - more keys used to protect the backup. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - - The keys referenced by kms_key_names must fully cover all - regions of the backup's instance configuration. Some - examples: - - - For single region instance configs, specify a single - regional location KMS key. - - For multi-regional instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For an instance config of type USER_MANAGED, please - specify only regional location KMS keys to cover each - region in the instance config. Multi-regional location KMS - keys are not supported for USER_MANAGED instance configs. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the backup. - - Values: - ENCRYPTION_TYPE_UNSPECIFIED (0): - Unspecified. Do not use. - USE_DATABASE_ENCRYPTION (1): - Use the same encryption configuration as the database. This - is the default option when - [encryption_config][google.spanner.admin.database.v1.CreateBackupEncryptionConfig] - is empty. 
For example, if the database is using - ``Customer_Managed_Encryption``, the backup will be using - the same Cloud KMS key as the database. - GOOGLE_DEFAULT_ENCRYPTION (2): - Use Google default encryption. - CUSTOMER_MANAGED_ENCRYPTION (3): - Use customer managed encryption. If specified, - ``kms_key_name`` must contain a valid Cloud KMS key. - """ - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_DATABASE_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type: EncryptionType = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CopyBackupEncryptionConfig(proto.Message): - r"""Encryption configuration for the copied backup. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.CopyBackupEncryptionConfig.EncryptionType): - Required. The encryption type of the backup. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to protect the - backup. This field should be set only when - [encryption_type][google.spanner.admin.database.v1.CopyBackupEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - kms_key_names (MutableSequence[str]): - Optional. Specifies the KMS configuration for the one or - more keys used to protect the backup. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - Kms keys specified can be in any order. - - The keys referenced by kms_key_names must fully cover all - regions of the backup's instance configuration. Some - examples: - - - For single region instance configs, specify a single - regional location KMS key. - - For multi-regional instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For an instance config of type USER_MANAGED, please - specify only regional location KMS keys to cover each - region in the instance config. Multi-regional location KMS - keys are not supported for USER_MANAGED instance configs. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the backup. - - Values: - ENCRYPTION_TYPE_UNSPECIFIED (0): - Unspecified. Do not use. - USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1): - This is the default option for - [CopyBackup][google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup] - when - [encryption_config][google.spanner.admin.database.v1.CopyBackupEncryptionConfig] - is not specified. For example, if the source backup is using - ``Customer_Managed_Encryption``, the backup will be using - the same Cloud KMS key as the source backup. - GOOGLE_DEFAULT_ENCRYPTION (2): - Use Google default encryption. - CUSTOMER_MANAGED_ENCRYPTION (3): - Use customer managed encryption. If specified, either - ``kms_key_name`` or ``kms_key_names`` must contain valid - Cloud KMS key(s). 
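A minimal sketch of copying a backup with the customer-managed encryption type described above, assuming hypothetical instance, backup, and KMS key names:

# Sketch: CopyBackup with a customer-managed encryption key.
import datetime

from google.cloud import spanner_admin_database_v1

request = spanner_admin_database_v1.CopyBackupRequest(
    parent="projects/my-project/instances/target-instance",
    backup_id="my-backup-copy",
    source_backup="projects/my-project/instances/source-instance/backups/my-backup",
    expire_time=(datetime.datetime.now(datetime.timezone.utc)
                 + datetime.timedelta(days=7)),
    encryption_config=spanner_admin_database_v1.CopyBackupEncryptionConfig(
        encryption_type=spanner_admin_database_v1.CopyBackupEncryptionConfig
            .EncryptionType.CUSTOMER_MANAGED_ENCRYPTION,
        kms_key_name=("projects/my-project/locations/us-central1/"
                      "keyRings/my-ring/cryptoKeys/my-key"),
    ),
)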
- """ - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type: EncryptionType = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class FullBackupSpec(proto.Message): - r"""The specification for full backups. - A full backup stores the entire contents of the database at a - given version time. - - """ - - -class IncrementalBackupSpec(proto.Message): - r"""The specification for incremental backup chains. - An incremental backup stores the delta of changes between a - previous backup and the database contents at a given version - time. An incremental backup chain consists of a full backup and - zero or more successive incremental backups. The first backup - created for an incremental backup chain is always a full backup. - - """ - - -class BackupInstancePartition(proto.Message): - r"""Instance partition information for the backup. - - Attributes: - instance_partition (str): - A unique identifier for the instance partition. Values are - of the form - ``projects//instances//instancePartitions/`` - """ - - instance_partition: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py deleted file mode 100644 index b69e3b08ee..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/backup_schedule.py +++ /dev/null @@ -1,369 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'BackupScheduleSpec', - 'BackupSchedule', - 'CrontabSpec', - 'CreateBackupScheduleRequest', - 'GetBackupScheduleRequest', - 'DeleteBackupScheduleRequest', - 'ListBackupSchedulesRequest', - 'ListBackupSchedulesResponse', - 'UpdateBackupScheduleRequest', - }, -) - - -class BackupScheduleSpec(proto.Message): - r"""Defines specifications of the backup schedule. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - cron_spec (google.cloud.spanner_admin_database_v1.types.CrontabSpec): - Cron style schedule specification. 
- - This field is a member of `oneof`_ ``schedule_spec``. - """ - - cron_spec: 'CrontabSpec' = proto.Field( - proto.MESSAGE, - number=1, - oneof='schedule_spec', - message='CrontabSpec', - ) - - -class BackupSchedule(proto.Message): - r"""BackupSchedule expresses the automated backup creation - specification for a Spanner database. - Next ID: 10 - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Identifier. Output only for the - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule] - operation. Required for the - [UpdateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule] - operation. A globally unique identifier for the backup - schedule, which cannot be changed. Values are of the form - ``projects//instances//databases//backupSchedules/[a-z][a-z0-9_\-]*[a-z0-9]`` - The final segment of the name must be between 2 and 60 - characters in length. - spec (google.cloud.spanner_admin_database_v1.types.BackupScheduleSpec): - Optional. The schedule specification based on - which backup creation is triggered. - retention_duration (google.protobuf.duration_pb2.Duration): - Optional. The retention duration of a backup, - which must be at least 6 hours and at most 366 - days. The backup is eligible to be automatically - deleted once the retention period has elapsed. - encryption_config (google.cloud.spanner_admin_database_v1.types.CreateBackupEncryptionConfig): - Optional. The encryption configuration that - will be used to encrypt the backup. If this - field is not specified, the backup will use the - same encryption configuration as the database. - full_backup_spec (google.cloud.spanner_admin_database_v1.types.FullBackupSpec): - The schedule creates only full backups. - - This field is a member of `oneof`_ ``backup_type_spec``. - incremental_backup_spec (google.cloud.spanner_admin_database_v1.types.IncrementalBackupSpec): - The schedule creates incremental backup - chains. - - This field is a member of `oneof`_ ``backup_type_spec``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp at which the - schedule was last updated. If the schedule has - never been updated, this field contains the - timestamp when the schedule was first created.
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - spec: 'BackupScheduleSpec' = proto.Field( - proto.MESSAGE, - number=6, - message='BackupScheduleSpec', - ) - retention_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - encryption_config: backup.CreateBackupEncryptionConfig = proto.Field( - proto.MESSAGE, - number=4, - message=backup.CreateBackupEncryptionConfig, - ) - full_backup_spec: backup.FullBackupSpec = proto.Field( - proto.MESSAGE, - number=7, - oneof='backup_type_spec', - message=backup.FullBackupSpec, - ) - incremental_backup_spec: backup.IncrementalBackupSpec = proto.Field( - proto.MESSAGE, - number=8, - oneof='backup_type_spec', - message=backup.IncrementalBackupSpec, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - - -class CrontabSpec(proto.Message): - r"""CrontabSpec can be used to specify the version time and - frequency at which the backup should be created. - - Attributes: - text (str): - Required. Textual representation of the crontab. User can - customize the backup frequency and the backup version time - using the cron expression. The version time must be in UTC - timezone. - - The backup will contain an externally consistent copy of the - database at the version time. Allowed frequencies are 12 - hour, 1 day, 1 week and 1 month. Examples of valid cron - specifications: - - - ``0 2/12 * * *`` : every 12 hours at (2, 14) hours past - midnight in UTC. - - ``0 2,14 * * *`` : every 12 hours at (2,14) hours past - midnight in UTC. - - ``0 2 * * *`` : once a day at 2 past midnight in UTC. - - ``0 2 * * 0`` : once a week every Sunday at 2 past - midnight in UTC. - - ``0 2 8 * *`` : once a month on 8th day at 2 past midnight - in UTC. - time_zone (str): - Output only. The time zone of the times in - ``CrontabSpec.text``. Currently only UTC is supported. - creation_window (google.protobuf.duration_pb2.Duration): - Output only. Schedule backups will contain an externally - consistent copy of the database at the version time - specified in ``schedule_spec.cron_spec``. However, Spanner - may not initiate the creation of the scheduled backups at - that version time. Spanner will initiate the creation of - scheduled backups within the time window bounded by the - version_time specified in ``schedule_spec.cron_spec`` and - version_time + ``creation_window``. - """ - - text: str = proto.Field( - proto.STRING, - number=1, - ) - time_zone: str = proto.Field( - proto.STRING, - number=2, - ) - creation_window: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - - -class CreateBackupScheduleRequest(proto.Message): - r"""The request for - [CreateBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule]. - - Attributes: - parent (str): - Required. The name of the database that this - backup schedule applies to. - backup_schedule_id (str): - Required. The Id to use for the backup schedule. The - ``backup_schedule_id`` appended to ``parent`` forms the full - backup schedule name of the form - ``projects//instances//databases//backupSchedules/``. - backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): - Required. The backup schedule to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_schedule_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup_schedule: 'BackupSchedule' = proto.Field( - proto.MESSAGE, - number=3, - message='BackupSchedule', - ) - - -class GetBackupScheduleRequest(proto.Message): - r"""The request for - [GetBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule]. - - Attributes: - name (str): - Required. The name of the schedule to retrieve. Values are - of the form - ``projects//instances//databases//backupSchedules/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBackupScheduleRequest(proto.Message): - r"""The request for - [DeleteBackupSchedule][google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule]. - - Attributes: - name (str): - Required. The name of the schedule to delete. Values are of - the form - ``projects//instances//databases//backupSchedules/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBackupSchedulesRequest(proto.Message): - r"""The request for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - - Attributes: - parent (str): - Required. Database is the parent resource - whose backup schedules should be listed. Values - are of the form - projects//instances//databases/ - page_size (int): - Optional. Number of backup schedules to be - returned in the response. If 0 or less, defaults - to the server's maximum allowed page size. - page_token (str): - Optional. If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListBackupSchedulesResponse.next_page_token] - from a previous - [ListBackupSchedulesResponse][google.spanner.admin.database.v1.ListBackupSchedulesResponse] - to the same ``parent``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupSchedulesResponse(proto.Message): - r"""The response for - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules]. - - Attributes: - backup_schedules (MutableSequence[google.cloud.spanner_admin_database_v1.types.BackupSchedule]): - The list of backup schedules for a database. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListBackupSchedules][google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules] - call to fetch more of the schedules. - """ - - @property - def raw_page(self): - return self - - backup_schedules: MutableSequence['BackupSchedule'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BackupSchedule', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateBackupScheduleRequest(proto.Message): - r"""The request for - [UpdateBackupScheduleRequest][google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule]. - - Attributes: - backup_schedule (google.cloud.spanner_admin_database_v1.types.BackupSchedule): - Required. The backup schedule to update. - ``backup_schedule.name``, and the fields to be updated as - specified by ``update_mask`` are required. Other fields are - ignored. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - the BackupSchedule resource should be updated. 
- This mask is relative to the BackupSchedule - resource, not to the request message. The field - mask must always be specified; this prevents any - future fields from being erased accidentally. - """ - - backup_schedule: 'BackupSchedule' = proto.Field( - proto.MESSAGE, - number=1, - message='BackupSchedule', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py deleted file mode 100644 index 40606ba0e5..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/common.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'DatabaseDialect', - 'OperationProgress', - 'EncryptionConfig', - 'EncryptionInfo', - }, -) - - -class DatabaseDialect(proto.Enum): - r"""Indicates the dialect type of a database. - - Values: - DATABASE_DIALECT_UNSPECIFIED (0): - Default value. This value will create a database with the - GOOGLE_STANDARD_SQL dialect. - GOOGLE_STANDARD_SQL (1): - GoogleSQL supported SQL. - POSTGRESQL (2): - PostgreSQL supported SQL. - """ - DATABASE_DIALECT_UNSPECIFIED = 0 - GOOGLE_STANDARD_SQL = 1 - POSTGRESQL = 2 - - -class OperationProgress(proto.Message): - r"""Encapsulates progress related information for a Cloud Spanner - long running operation. - - Attributes: - progress_percent (int): - Percent completion of the operation. - Values are between 0 and 100 inclusive. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time the request was received. - end_time (google.protobuf.timestamp_pb2.Timestamp): - If set, the time at which this operation - failed or was completed successfully. - """ - - progress_percent: int = proto.Field( - proto.INT32, - number=1, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class EncryptionConfig(proto.Message): - r"""Encryption configuration for a Cloud Spanner database. - - Attributes: - kms_key_name (str): - The Cloud KMS key to be used for encrypting and decrypting - the database. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. 
- kms_key_names (MutableSequence[str]): - Specifies the KMS configuration for the one or more keys - used to encrypt the database. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - - The keys referenced by kms_key_names must fully cover all - regions of the database instance configuration. Some - examples: - - - For single region database instance configs, specify a - single regional location KMS key. - - For multi-regional database instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For a database instance config of type USER_MANAGED, - please specify only regional location KMS keys to cover - each region in the instance config. Multi-regional - location KMS keys are not supported for USER_MANAGED - instance configs. - """ - - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class EncryptionInfo(proto.Message): - r"""Encryption information for a Cloud Spanner database or - backup. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.EncryptionInfo.Type): - Output only. The type of encryption. - encryption_status (google.rpc.status_pb2.Status): - Output only. If present, the status of a - recent encrypt/decrypt call on underlying data - for this database or backup. Regardless of - status, data is always encrypted at rest. - kms_key_version (str): - Output only. A Cloud KMS key version that is - being used to protect the database or backup. - """ - class Type(proto.Enum): - r"""Possible encryption types. - - Values: - TYPE_UNSPECIFIED (0): - Encryption type was not specified, though - data at rest remains encrypted. - GOOGLE_DEFAULT_ENCRYPTION (1): - The data is encrypted at rest with a key that - is fully managed by Google. No key version or - status will be populated. This is the default - state. - CUSTOMER_MANAGED_ENCRYPTION (2): - The data is encrypted at rest with a key that is managed by - the customer. The active version of the key. - ``kms_key_version`` will be populated, and - ``encryption_status`` may be populated. - """ - TYPE_UNSPECIFIED = 0 - GOOGLE_DEFAULT_ENCRYPTION = 1 - CUSTOMER_MANAGED_ENCRYPTION = 2 - - encryption_type: Type = proto.Field( - proto.ENUM, - number=3, - enum=Type, - ) - encryption_status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=4, - message=status_pb2.Status, - ) - kms_key_version: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py deleted file mode 100644 index 3d882c8443..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py +++ /dev/null @@ -1,1348 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import common -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.spanner.admin.database.v1', - manifest={ - 'RestoreSourceType', - 'RestoreInfo', - 'Database', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'CreateDatabaseRequest', - 'CreateDatabaseMetadata', - 'GetDatabaseRequest', - 'UpdateDatabaseRequest', - 'UpdateDatabaseMetadata', - 'UpdateDatabaseDdlRequest', - 'DdlStatementActionInfo', - 'UpdateDatabaseDdlMetadata', - 'DropDatabaseRequest', - 'GetDatabaseDdlRequest', - 'GetDatabaseDdlResponse', - 'ListDatabaseOperationsRequest', - 'ListDatabaseOperationsResponse', - 'RestoreDatabaseRequest', - 'RestoreDatabaseEncryptionConfig', - 'RestoreDatabaseMetadata', - 'OptimizeRestoredDatabaseMetadata', - 'DatabaseRole', - 'ListDatabaseRolesRequest', - 'ListDatabaseRolesResponse', - 'AddSplitPointsRequest', - 'AddSplitPointsResponse', - 'SplitPoints', - 'InternalUpdateGraphOperationRequest', - 'InternalUpdateGraphOperationResponse', - }, -) - - -class RestoreSourceType(proto.Enum): - r"""Indicates the type of the restore source. - - Values: - TYPE_UNSPECIFIED (0): - No restore associated. - BACKUP (1): - A backup was used as the source of the - restore. - """ - TYPE_UNSPECIFIED = 0 - BACKUP = 1 - - -class RestoreInfo(proto.Message): - r"""Information about the database restore. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): - The type of the restore source. - backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): - Information about the backup used to restore - the database. The backup may no longer exist. - - This field is a member of `oneof`_ ``source_info``. - """ - - source_type: 'RestoreSourceType' = proto.Field( - proto.ENUM, - number=1, - enum='RestoreSourceType', - ) - backup_info: gsad_backup.BackupInfo = proto.Field( - proto.MESSAGE, - number=2, - oneof='source_info', - message=gsad_backup.BackupInfo, - ) - - -class Database(proto.Message): - r"""A Cloud Spanner database. - - Attributes: - name (str): - Required. The name of the database. Values are of the form - ``projects//instances//databases/``, - where ```` is as specified in the - ``CREATE DATABASE`` statement. This name can be passed to - other API methods to identify the database. - state (google.cloud.spanner_admin_database_v1.types.Database.State): - Output only. The current database state. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
If exists, the time at which the - database creation started. - restore_info (google.cloud.spanner_admin_database_v1.types.RestoreInfo): - Output only. Applicable only for restored - databases. Contains information about the - restore source. - encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): - Output only. For databases that are using - customer managed encryption, this field contains - the encryption configuration for the database. - For databases that are using Google default or - other types of encryption, this field is empty. - encryption_info (MutableSequence[google.cloud.spanner_admin_database_v1.types.EncryptionInfo]): - Output only. For databases that are using customer managed - encryption, this field contains the encryption information - for the database, such as all Cloud KMS key versions that - are in use. The ``encryption_status`` field inside each - ``EncryptionInfo`` is not populated. - - For databases that are using Google default or other types - of encryption, this field is empty. - - This field is propagated lazily from the backend. There - might be a delay between when a key version is first used and - when it appears in this field. - version_retention_period (str): - Output only. The period in which Cloud Spanner retains all - versions of data for the database. This is the same as the - value of the version_retention_period database option set using - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - Defaults to 1 hour, if not set. - earliest_version_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Earliest timestamp at which - older versions of the data can be read. This - value is continuously updated by Cloud Spanner - and becomes stale the moment it is queried. If - you are using this value to recover data, make - sure to account for the time from the moment - when the value is queried to the moment when you - initiate the recovery. - default_leader (str): - Output only. The read-write region which contains the - database's leader replicas. - - This is the same as the value of the default_leader database - option set using DatabaseAdmin.CreateDatabase or - DatabaseAdmin.UpdateDatabaseDdl. If not explicitly set, this - is empty. - database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): - Output only. The dialect of the Cloud Spanner - Database. - enable_drop_protection (bool): - Whether drop protection is enabled for this database. - Defaults to false, if not set. For more details, please see - how to `prevent accidental database - deletion `__. - reconciling (bool): - Output only. If true, the database is being - updated. If false, there are no ongoing update - operations for the database. - """ - class State(proto.Enum): - r"""Indicates the current state of the database. - - Values: - STATE_UNSPECIFIED (0): - Not specified. - CREATING (1): - The database is still being created. Operations on the - database may fail with ``FAILED_PRECONDITION`` in this - state. - READY (2): - The database is fully created and ready for - use. - READY_OPTIMIZING (3): - The database is fully created and ready for use, but is - still being optimized for performance and cannot handle full - load. - - In this state, the database still references the backup it - was restored from, preventing the backup from being deleted. - When optimizations are complete, the full performance of the - database will be restored, and the database will transition - to the ``READY`` state.
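As a small illustration of how callers typically consume this enum, a sketch using the generated client; the database path is a placeholder::

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()
    database = client.get_database(
        request={"name": "projects/p/instances/i/databases/d"}  # placeholder
    )
    if database.state == spanner_admin_database_v1.Database.State.READY_OPTIMIZING:
        # The database is usable, but post-restore optimization is still
        # running, so the source backup cannot be deleted yet.
        pass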
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - READY_OPTIMIZING = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - state: State = proto.Field( - proto.ENUM, - number=2, - enum=State, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - restore_info: 'RestoreInfo' = proto.Field( - proto.MESSAGE, - number=4, - message='RestoreInfo', - ) - encryption_config: common.EncryptionConfig = proto.Field( - proto.MESSAGE, - number=5, - message=common.EncryptionConfig, - ) - encryption_info: MutableSequence[common.EncryptionInfo] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=common.EncryptionInfo, - ) - version_retention_period: str = proto.Field( - proto.STRING, - number=6, - ) - earliest_version_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - default_leader: str = proto.Field( - proto.STRING, - number=9, - ) - database_dialect: common.DatabaseDialect = proto.Field( - proto.ENUM, - number=10, - enum=common.DatabaseDialect, - ) - enable_drop_protection: bool = proto.Field( - proto.BOOL, - number=11, - ) - reconciling: bool = proto.Field( - proto.BOOL, - number=12, - ) - - -class ListDatabasesRequest(proto.Message): - r"""The request for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Attributes: - parent (str): - Required. The instance whose databases should be listed. - Values are of the form - ``projects//instances/``. - page_size (int): - Number of databases to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabasesResponse.next_page_token] - from a previous - [ListDatabasesResponse][google.spanner.admin.database.v1.ListDatabasesResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDatabasesResponse(proto.Message): - r"""The response for - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. - - Attributes: - databases (MutableSequence[google.cloud.spanner_admin_database_v1.types.Database]): - Databases that matched the request. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases] - call to fetch more of the matching databases. - """ - - @property - def raw_page(self): - return self - - databases: MutableSequence['Database'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Database', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDatabaseRequest(proto.Message): - r"""The request for - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - - Attributes: - parent (str): - Required. The name of the instance that will serve the new - database. Values are of the form - ``projects//instances/``. - create_statement (str): - Required. A ``CREATE DATABASE`` statement, which specifies - the ID of the new database. The database ID must conform to - the regular expression ``[a-z][a-z0-9_\-]*[a-z0-9]`` and be - between 2 and 30 characters in length. 
If the database ID is - a reserved word or if it contains a hyphen, the database ID - must be enclosed in backticks (:literal:`\``). - extra_statements (MutableSequence[str]): - Optional. A list of DDL statements to run - inside the newly created database. Statements - can create tables, indexes, etc. These - statements execute atomically with the creation - of the database: - - if there is an error in any statement, the - database is not created. - encryption_config (google.cloud.spanner_admin_database_v1.types.EncryptionConfig): - Optional. The encryption configuration for - the database. If this field is not specified, - Cloud Spanner will encrypt/decrypt all data at - rest using Google default encryption. - database_dialect (google.cloud.spanner_admin_database_v1.types.DatabaseDialect): - Optional. The dialect of the Cloud Spanner - Database. - proto_descriptors (bytes): - Optional. Proto descriptors used by CREATE/ALTER PROTO - BUNDLE statements in 'extra_statements' above. Contains a - protobuf-serialized - `google.protobuf.FileDescriptorSet `__. - To generate it, - `install `__ and - run ``protoc`` with --include_imports and - --descriptor_set_out. For example, to generate for - moon/shot/app.proto, run - - :: - - $protoc --proto_path=/app_path --proto_path=/lib_path \ - --include_imports \ - --descriptor_set_out=descriptors.data \ - moon/shot/app.proto - - For more details, see protobuffer `self - description `__. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - create_statement: str = proto.Field( - proto.STRING, - number=2, - ) - extra_statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - encryption_config: common.EncryptionConfig = proto.Field( - proto.MESSAGE, - number=4, - message=common.EncryptionConfig, - ) - database_dialect: common.DatabaseDialect = proto.Field( - proto.ENUM, - number=5, - enum=common.DatabaseDialect, - ) - proto_descriptors: bytes = proto.Field( - proto.BYTES, - number=6, - ) - - -class CreateDatabaseMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. - - Attributes: - database (str): - The database being created. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseRequest(proto.Message): - r"""The request for - [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. - - Attributes: - name (str): - Required. The name of the requested database. Values are of - the form - ``projects//instances//databases/``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDatabaseRequest(proto.Message): - r"""The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - - Attributes: - database (google.cloud.spanner_admin_database_v1.types.Database): - Required. The database to update. The ``name`` field of the - database is of the form - ``projects//instances//databases/``. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to update. Currently, only - ``enable_drop_protection`` field can be updated. 
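For orientation, a minimal sketch of flipping ``enable_drop_protection`` through this request; the database path is a placeholder, and the generic ``request``-dict calling form is assumed::

    from google.cloud import spanner_admin_database_v1
    from google.protobuf import field_mask_pb2

    client = spanner_admin_database_v1.DatabaseAdminClient()
    operation = client.update_database(
        request={
            "database": spanner_admin_database_v1.Database(
                name="projects/p/instances/i/databases/d",  # placeholder
                enable_drop_protection=True,
            ),
            # Only fields named in the mask are read from `database`.
            "update_mask": field_mask_pb2.FieldMask(
                paths=["enable_drop_protection"],
            ),
        }
    )
    updated = operation.result()  # UpdateDatabase is long-running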
- """ - - database: 'Database' = proto.Field( - proto.MESSAGE, - number=1, - message='Database', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class UpdateDatabaseMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - - Attributes: - request (google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest): - The request for - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase]. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [UpdateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is - best-effort). - """ - - request: 'UpdateDatabaseRequest' = proto.Field( - proto.MESSAGE, - number=1, - message='UpdateDatabaseRequest', - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateDatabaseDdlRequest(proto.Message): - r"""Enqueues the given DDL statements to be applied, in order but not - necessarily all at once, to the database schema at some point (or - points) in the future. The server checks that the statements are - executable (syntactically valid, name tables that exist, etc.) - before enqueueing them, but they may still fail upon later execution - (e.g., if a statement from another batch of statements is applied - first and it conflicts in some way, or if there is some data-related - problem like a ``NULL`` value in a column to which ``NOT NULL`` - would be added). If a statement fails, all subsequent statements in - the batch are automatically cancelled. - - Each batch of statements is assigned a name which can be used with - the [Operations][google.longrunning.Operations] API to monitor - progress. See the - [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] - field for more details. - - Attributes: - database (str): - Required. The database to update. - statements (MutableSequence[str]): - Required. DDL statements to be applied to the - database. - operation_id (str): - If empty, the new update request is assigned an - automatically-generated operation ID. Otherwise, - ``operation_id`` is used to construct the name of the - resulting [Operation][google.longrunning.Operation]. - - Specifying an explicit operation ID simplifies determining - whether the statements were executed in the event that the - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - call is replayed, or the return value is otherwise lost: the - [database][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.database] - and ``operation_id`` fields can be combined to form the - [name][google.longrunning.Operation.name] of the resulting - [longrunning.Operation][google.longrunning.Operation]: - ``/operations/``. - - ``operation_id`` should be unique within the database, and - must be a valid identifier: ``[a-z][a-z0-9_]*``. Note that - automatically-generated operation IDs always begin with an - underscore. 
If the named operation already exists, - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - returns ``ALREADY_EXISTS``. - proto_descriptors (bytes): - Optional. Proto descriptors used by CREATE/ALTER PROTO - BUNDLE statements. Contains a protobuf-serialized - `google.protobuf.FileDescriptorSet `__. - To generate it, - `install `__ and - run ``protoc`` with --include_imports and - --descriptor_set_out. For example, to generate for - moon/shot/app.proto, run - - :: - - $protoc --proto_path=/app_path --proto_path=/lib_path \ - --include_imports \ - --descriptor_set_out=descriptors.data \ - moon/shot/app.proto - - For more details, see protobuffer `self - description `__. - throughput_mode (bool): - Optional. This field is exposed to be used by the Spanner - Migration Tool. For more details, see - `SMT `__. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - operation_id: str = proto.Field( - proto.STRING, - number=3, - ) - proto_descriptors: bytes = proto.Field( - proto.BYTES, - number=4, - ) - throughput_mode: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class DdlStatementActionInfo(proto.Message): - r"""Action information extracted from a DDL statement. This proto is - used to display the brief info of the DDL statement for the - operation - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - - Attributes: - action (str): - The action for the DDL statement, e.g. - CREATE, ALTER, DROP, GRANT, etc. This field is a - non-empty string. - entity_type (str): - The entity type for the DDL statement, e.g. TABLE, INDEX, - VIEW, etc. This field can be empty string for some DDL - statement, e.g. for statement "ANALYZE", ``entity_type`` = - "". - entity_names (MutableSequence[str]): - The entity name(s) being operated on the DDL statement. E.g. - - 1. For statement "CREATE TABLE t1(...)", ``entity_names`` = - ["t1"]. - 2. For statement "GRANT ROLE r1, r2 ...", ``entity_names`` = - ["r1", "r2"]. - 3. For statement "ANALYZE", ``entity_names`` = []. - """ - - action: str = proto.Field( - proto.STRING, - number=1, - ) - entity_type: str = proto.Field( - proto.STRING, - number=2, - ) - entity_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateDatabaseDdlMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. - - Attributes: - database (str): - The database being modified. - statements (MutableSequence[str]): - For an update this list contains all the - statements. For an individual statement, this - list contains only that statement. - commit_timestamps (MutableSequence[google.protobuf.timestamp_pb2.Timestamp]): - Reports the commit timestamps of all statements that have - succeeded so far, where ``commit_timestamps[i]`` is the - commit timestamp for the statement ``statements[i]``. - throttled (bool): - Output only. When true, indicates that the - operation is throttled e.g. due to resource - constraints. When resources become available the - operation will resume and this field will be - false again. - progress (MutableSequence[google.cloud.spanner_admin_database_v1.types.OperationProgress]): - The progress of the - [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] - operations. 
All DDL statements will have continuously - updating progress, and ``progress[i]`` is the operation - progress for ``statements[i]``. Also, ``progress[i]`` will - have start time and end time populated with commit timestamp - of operation, as well as a progress of 100% once the - operation has completed. - actions (MutableSequence[google.cloud.spanner_admin_database_v1.types.DdlStatementActionInfo]): - The brief action info for the DDL statements. ``actions[i]`` - is the brief info for ``statements[i]``. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - commit_timestamps: MutableSequence[timestamp_pb2.Timestamp] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - throttled: bool = proto.Field( - proto.BOOL, - number=4, - ) - progress: MutableSequence[common.OperationProgress] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=common.OperationProgress, - ) - actions: MutableSequence['DdlStatementActionInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='DdlStatementActionInfo', - ) - - -class DropDatabaseRequest(proto.Message): - r"""The request for - [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. - - Attributes: - database (str): - Required. The database to be dropped. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseDdlRequest(proto.Message): - r"""The request for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - Attributes: - database (str): - Required. The database whose schema we wish to get. Values - are of the form - ``projects//instances//databases/`` - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDatabaseDdlResponse(proto.Message): - r"""The response for - [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. - - Attributes: - statements (MutableSequence[str]): - A list of formatted DDL statements defining - the schema of the database specified in the - request. - proto_descriptors (bytes): - Proto descriptors stored in the database. Contains a - protobuf-serialized - `google.protobuf.FileDescriptorSet `__. - For more details, see protobuffer `self - description `__. - """ - - statements: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - proto_descriptors: bytes = proto.Field( - proto.BYTES, - number=2, - ) - - -class ListDatabaseOperationsRequest(proto.Message): - r"""The request for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Attributes: - parent (str): - Required. The instance of the database operations. Values - are of the form ``projects//instances/``. - filter (str): - An expression that filters the list of returned operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the - [Operation][google.longrunning.Operation] are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. 
For example, - the type string for - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata] - is - ``type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first, if filtering - on metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic. However, you can specify AND, OR, - and NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. - - ``(metadata.@type=type.googleapis.com/google.spanner.admin.database.v1.RestoreDatabaseMetadata) AND`` - ``(metadata.source_type:BACKUP) AND`` - ``(metadata.backup_info.backup:backup_howl) AND`` - ``(metadata.name:restored_howl) AND`` - ``(metadata.progress.start_time < \"2018-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Return operations where: - - - The operation's metadata type is - [RestoreDatabaseMetadata][google.spanner.admin.database.v1.RestoreDatabaseMetadata]. - - The database is restored from a backup. - - The backup name contains "backup_howl". - - The restored database's name contains "restored_howl". - - The operation started before 2018-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabaseOperationsResponse.next_page_token] - from a previous - [ListDatabaseOperationsResponse][google.spanner.admin.database.v1.ListDatabaseOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDatabaseOperationsResponse(proto.Message): - r"""The response for - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations]. - - Attributes: - operations (MutableSequence[google.longrunning.operations_pb2.Operation]): - The list of matching database [long-running - operations][google.longrunning.Operation]. Each operation's - name will be prefixed by the database's name. The - operation's - [metadata][google.longrunning.Operation.metadata] field type - ``metadata.type_url`` describes the type of the metadata. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabaseOperations][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RestoreDatabaseRequest(proto.Message): - r"""The request for - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The name of the instance in which to create the - restored database. This instance must be in the same project - and have the same instance configuration as the instance - containing the source backup. Values are of the form - ``projects//instances/``. - database_id (str): - Required. The id of the database to create and restore to. - This database must not already exist. The ``database_id`` - appended to ``parent`` forms the full database name of the - form - ``projects//instances//databases/``. - backup (str): - Name of the backup from which to restore. Values are of the - form - ``projects//instances//backups/``. - - This field is a member of `oneof`_ ``source``. - encryption_config (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig): - Optional. An encryption configuration describing the - encryption type and key resources in Cloud KMS used to - encrypt/decrypt the database to restore to. If this field is - not specified, the restored database will use the same - encryption configuration as the backup by default, namely - [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - = ``USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup: str = proto.Field( - proto.STRING, - number=3, - oneof='source', - ) - encryption_config: 'RestoreDatabaseEncryptionConfig' = proto.Field( - proto.MESSAGE, - number=4, - message='RestoreDatabaseEncryptionConfig', - ) - - -class RestoreDatabaseEncryptionConfig(proto.Message): - r"""Encryption configuration for the restored database. - - Attributes: - encryption_type (google.cloud.spanner_admin_database_v1.types.RestoreDatabaseEncryptionConfig.EncryptionType): - Required. The encryption type of the restored - database. - kms_key_name (str): - Optional. The Cloud KMS key that will be used to - encrypt/decrypt the restored database. This field should be - set only when - [encryption_type][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig.encryption_type] - is ``CUSTOMER_MANAGED_ENCRYPTION``. Values are of the form - ``projects//locations//keyRings//cryptoKeys/``. - kms_key_names (MutableSequence[str]): - Optional. Specifies the KMS configuration for the one or - more keys used to encrypt the database. Values are of the - form - ``projects//locations//keyRings//cryptoKeys/``. - - The keys referenced by kms_key_names must fully cover all - regions of the database instance configuration. Some - examples: - - - For single region database instance configs, specify a - single regional location KMS key. - - For multi-regional database instance configs of type - GOOGLE_MANAGED, either specify a multi-regional location - KMS key or multiple regional location KMS keys that cover - all regions in the instance config. - - For a database instance config of type USER_MANAGED, - please specify only regional location KMS keys to cover - each region in the instance config. Multi-regional - location KMS keys are not supported for USER_MANAGED - instance configs. - """ - class EncryptionType(proto.Enum): - r"""Encryption types for the database to be restored. - - Values: - ENCRYPTION_TYPE_UNSPECIFIED (0): - Unspecified. Do not use. 
- USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION (1): - This is the default option when - [encryption_config][google.spanner.admin.database.v1.RestoreDatabaseEncryptionConfig] - is not specified. - GOOGLE_DEFAULT_ENCRYPTION (2): - Use Google default encryption. - CUSTOMER_MANAGED_ENCRYPTION (3): - Use customer managed encryption. If specified, - ``kms_key_name`` must contain a valid Cloud KMS key. - """ - ENCRYPTION_TYPE_UNSPECIFIED = 0 - USE_CONFIG_DEFAULT_OR_BACKUP_ENCRYPTION = 1 - GOOGLE_DEFAULT_ENCRYPTION = 2 - CUSTOMER_MANAGED_ENCRYPTION = 3 - - encryption_type: EncryptionType = proto.Field( - proto.ENUM, - number=1, - enum=EncryptionType, - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class RestoreDatabaseMetadata(proto.Message): - r"""Metadata type for the long-running operation returned by - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Name of the database being created and - restored to. - source_type (google.cloud.spanner_admin_database_v1.types.RestoreSourceType): - The type of the restore source. - backup_info (google.cloud.spanner_admin_database_v1.types.BackupInfo): - Information about the backup used to restore - the database. - - This field is a member of `oneof`_ ``source_info``. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the - [RestoreDatabase][google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which cancellation of this operation was - received. - [Operations.CancelOperation][google.longrunning.Operations.CancelOperation] - starts asynchronous cancellation on a long-running - operation. The server makes a best effort to cancel the - operation, but success is not guaranteed. Clients can use - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - or other methods to check whether the cancellation succeeded - or whether the operation completed despite cancellation. On - successful cancellation, the operation is not deleted; - instead, it becomes an operation with an - [Operation.error][google.longrunning.Operation.error] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - optimize_database_operation_name (str): - If set, the name of the long-running operation that will - be used to track the post-restore optimization process to - optimize the performance of the restored database, and - remove the dependency on the restore source. The name is of - the form - ``projects//instances//databases//operations/`` - where the database component is the name of the database - being created and restored to. The metadata type of the - long-running operation is - [OptimizeRestoredDatabaseMetadata][google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata]. - This long-running operation will be automatically created by - the system after the RestoreDatabase long-running operation - completes successfully. This operation will not be created - if the restore was not successful.
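For orientation, a minimal sketch of a restore that then reads this metadata; all resource names are placeholders, and the generic ``request``-dict calling form is assumed::

    from google.cloud import spanner_admin_database_v1

    client = spanner_admin_database_v1.DatabaseAdminClient()
    operation = client.restore_database(
        request={
            "parent": "projects/p/instances/i",  # placeholder instance
            "database_id": "restored-db",
            "backup": "projects/p/instances/i/backups/b",  # placeholder
        }
    )
    database = operation.result()  # waits for the restore to complete
    # On success, the follow-up optimization operation, if any, is
    # reported in the operation metadata.
    print(operation.metadata.optimize_database_operation_name)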
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - source_type: 'RestoreSourceType' = proto.Field( - proto.ENUM, - number=2, - enum='RestoreSourceType', - ) - backup_info: gsad_backup.BackupInfo = proto.Field( - proto.MESSAGE, - number=3, - oneof='source_info', - message=gsad_backup.BackupInfo, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=4, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - optimize_database_operation_name: str = proto.Field( - proto.STRING, - number=6, - ) - - -class OptimizeRestoredDatabaseMetadata(proto.Message): - r"""Metadata type for the long-running operation used to track - the progress of optimizations performed on a newly restored - database. This long-running operation is automatically created - by the system after the successful completion of a database - restore, and cannot be cancelled. - - Attributes: - name (str): - Name of the restored database being - optimized. - progress (google.cloud.spanner_admin_database_v1.types.OperationProgress): - The progress of the post-restore - optimizations. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - - -class DatabaseRole(proto.Message): - r"""A Cloud Spanner database role. - - Attributes: - name (str): - Required. The name of the database role. Values are of the - form - ``projects//instances//databases//databaseRoles/`` - where ```` is as specified in the ``CREATE ROLE`` DDL - statement. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDatabaseRolesRequest(proto.Message): - r"""The request for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - - Attributes: - parent (str): - Required. The database whose roles should be listed. Values - are of the form - ``projects//instances//databases/``. - page_size (int): - Number of database roles to be returned in - the response. If 0 or less, defaults to the - server's maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.database.v1.ListDatabaseRolesResponse.next_page_token] - from a previous - [ListDatabaseRolesResponse][google.spanner.admin.database.v1.ListDatabaseRolesResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDatabaseRolesResponse(proto.Message): - r"""The response for - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles]. - - Attributes: - database_roles (MutableSequence[google.cloud.spanner_admin_database_v1.types.DatabaseRole]): - Database roles that matched the request. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListDatabaseRoles][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles] - call to fetch more of the matching roles. 
- """ - - @property - def raw_page(self): - return self - - database_roles: MutableSequence['DatabaseRole'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DatabaseRole', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AddSplitPointsRequest(proto.Message): - r"""The request for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - Attributes: - database (str): - Required. The database on whose tables/indexes split points - are to be added. Values are of the form - ``projects//instances//databases/``. - split_points (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]): - Required. The split points to add. - initiator (str): - Optional. A user-supplied tag associated with the split - points. For example, "intital_data_load", "special_event_1". - Defaults to "CloudAddSplitPointsAPI" if not specified. The - length of the tag must not exceed 50 characters,else will be - trimmed. Only valid UTF8 characters are allowed. - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - split_points: MutableSequence['SplitPoints'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='SplitPoints', - ) - initiator: str = proto.Field( - proto.STRING, - number=3, - ) - - -class AddSplitPointsResponse(proto.Message): - r"""The response for - [AddSplitPoints][google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints]. - - """ - - -class SplitPoints(proto.Message): - r"""The split points of a table/index. - - Attributes: - table (str): - The table to split. - index (str): - The index to split. If specified, the ``table`` field must - refer to the index's base table. - keys (MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints.Key]): - Required. The list of split keys, i.e., the - split boundaries. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The expiration timestamp of the - split points. A timestamp in the past means - immediate expiration. The maximum value can be - 30 days in the future. Defaults to 10 days in - the future if not specified. - """ - - class Key(proto.Message): - r"""A split key. - - Attributes: - key_parts (google.protobuf.struct_pb2.ListValue): - Required. The column values making up the - split key. - """ - - key_parts: struct_pb2.ListValue = proto.Field( - proto.MESSAGE, - number=1, - message=struct_pb2.ListValue, - ) - - table: str = proto.Field( - proto.STRING, - number=1, - ) - index: str = proto.Field( - proto.STRING, - number=2, - ) - keys: MutableSequence[Key] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Key, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class InternalUpdateGraphOperationRequest(proto.Message): - r"""Internal request proto, do not use directly. - - Attributes: - database (str): - Internal field, do not use directly. - operation_id (str): - Internal field, do not use directly. - vm_identity_token (str): - Internal field, do not use directly. - progress (float): - Internal field, do not use directly. - status (google.rpc.status_pb2.Status): - Internal field, do not use directly. 
- """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - operation_id: str = proto.Field( - proto.STRING, - number=2, - ) - vm_identity_token: str = proto.Field( - proto.STRING, - number=5, - ) - progress: float = proto.Field( - proto.DOUBLE, - number=3, - ) - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=6, - message=status_pb2.Status, - ) - - -class InternalUpdateGraphOperationResponse(proto.Message): - r"""Internal response proto, do not use directly. - """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_database/v1/mypy.ini b/owl-bot-staging/spanner_admin_database/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_database/v1/noxfile.py b/owl-bot-staging/spanner_admin_database/v1/noxfile.py deleted file mode 100644 index feed1a3ac8..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/noxfile.py +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", - "3.14", -] - -DEFAULT_PYTHON_VERSION = "3.14" - -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): -# Switch this to Python 3.15 alpha1 -# https://peps.python.org/pep-0790/ -PREVIEW_PYTHON_VERSION = "3.14" - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-spanner-admin-database" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - # 
TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy - "mypy<1.16.0", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. 
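# (Editor's illustrative sketch, not part of the generated noxfile: the
# parametrized test sessions below select the protobuf runtime through an
# environment variable that must be set before google.protobuf is first
# imported; `api_implementation` is an internal protobuf module, used here
# only to show which runtime was picked.)
import os

os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "upb"

from google.protobuf.internal import api_implementation  # noqa: E402

print(api_implementation.Type())  # -> "upb", "python", or "cpp"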
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory - # paths to build: - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=PREVIEW_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. 
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit test sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # Note: If a dependency is added to the `prerel_deps` list, - # the `core_dependencies_from_source` list in the `core_deps_from_source` - # nox session should also be updated. - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpc-google-iam-v1", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--ignore-installed", dep) - # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status` - # to the dictionary below once this bug is fixed. - # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add - # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below - # once this bug is fixed. - package_namespaces = { - "google-api-core": "google.api_core", - "google-auth": "google.auth", - "grpcio": "grpc", - "protobuf": "google.protobuf", - "proto-plus": "proto", - } - - version_namespace = package_namespaces.get(dep) - - print(f"Installed {dep}") - if version_namespace: - session.run( - "python", - "-c", - f"import {version_namespace}; print({version_namespace}.__version__)", - ) - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb"], -) -def core_deps_from_source(session, protobuf_implementation): - """Run all tests with core dependencies installed from source - rather than pulling the dependencies from PyPI. - """ - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit test sessions has a - # constraints file containing all dependencies and extras.
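# (Editor's sketch, assuming the usual pip constraints format of one
# `name==version` pin per line, for example:
#
#   google-api-core==1.34.1
#   proto-plus==1.22.3
#
# The lookahead regex below, r"^\s*(\S+)(?===\S+)", captures just the
# package name from each pin and yields no match for blank or comment
# lines.)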
- with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # Install dependencies specified in `testing/constraints-X.txt`. - session.install(*constraints_deps) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and - # `grpcio-status` should be added to the list below so that they are installed from source, - # rather than PyPI. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be - # added to the list below so that it is installed from source, rather than PyPI - # Note: If a dependency is added to the `core_dependencies_from_source` list, - # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. - core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json deleted file mode 100644 index e89008727d..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json +++ /dev/null @@ -1,4480 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.spanner.admin.database.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-spanner-admin-database", - "version": "0.0.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.add_split_points", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "AddSplitPoints" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "split_points", - "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", 
- "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", - "shortName": "add_split_points" - }, - "description": "Sample for AddSplitPoints", - "file": "spanner_v1_generated_database_admin_add_split_points_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_add_split_points_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.add_split_points", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.AddSplitPoints", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "AddSplitPoints" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "split_points", - "type": "MutableSequence[google.cloud.spanner_admin_database_v1.types.SplitPoints]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.AddSplitPointsResponse", - "shortName": "add_split_points" - }, - "description": "Sample for AddSplitPoints", - "file": "spanner_v1_generated_database_admin_add_split_points_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_add_split_points_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.copy_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CopyBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_id", - "type": "str" - }, - { - 
"name": "source_backup", - "type": "str" - }, - { - "name": "expire_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "copy_backup" - }, - "description": "Sample for CopyBackup", - "file": "spanner_v1_generated_database_admin_copy_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_copy_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.copy_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CopyBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CopyBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CopyBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_id", - "type": "str" - }, - { - "name": "source_backup", - "type": "str" - }, - { - "name": "expire_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "copy_backup" - }, - "description": "Sample for CopyBackup", - "file": "spanner_v1_generated_database_admin_copy_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CopyBackup_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_copy_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": 
"CreateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "backup_schedule_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "create_backup_schedule" - }, - "description": "Sample for CreateBackupSchedule", - "file": "spanner_v1_generated_database_admin_create_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupScheduleRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "backup_schedule_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "create_backup_schedule" - }, - "description": "Sample for CreateBackupSchedule", - "file": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "backup_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_backup" - }, - "description": "Sample for CreateBackup", - "file": "spanner_v1_generated_database_admin_create_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateBackupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "backup_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_backup" - }, - "description": "Sample for CreateBackup", - "file": "spanner_v1_generated_database_admin_create_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateBackup_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "spanner_v1_generated_database_admin_create_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.create_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "create_statement", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "spanner_v1_generated_database_admin_create_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.create_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "create_statement", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "spanner_v1_generated_database_admin_create_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_create_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup_schedule" - }, - "description": "Sample for DeleteBackupSchedule", - "file": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup_schedule" - }, - "description": "Sample for DeleteBackupSchedule", - "file": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": 
{ - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.delete_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "spanner_v1_generated_database_admin_delete_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.delete_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DeleteBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DeleteBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DeleteBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "spanner_v1_generated_database_admin_delete_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_delete_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.drop_database", - "method": { - "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DropDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "drop_database" - }, - "description": "Sample for DropDatabase", - "file": "spanner_v1_generated_database_admin_drop_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_drop_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.drop_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "DropDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.DropDatabaseRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "drop_database" - }, - "description": "Sample for DropDatabase", - "file": "spanner_v1_generated_database_admin_drop_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_DropDatabase_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_drop_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "get_backup_schedule" - }, - "description": "Sample for GetBackupSchedule", - "file": "spanner_v1_generated_database_admin_get_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetBackupScheduleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "get_backup_schedule" - }, - "description": "Sample for GetBackupSchedule", - "file": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_admin_database_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "spanner_v1_generated_database_admin_get_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "spanner_v1_generated_database_admin_get_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetBackup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", - "shortName": "get_database_ddl" - }, - "description": "Sample for GetDatabaseDdl", - "file": "spanner_v1_generated_database_admin_get_database_ddl_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_ddl_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.GetDatabaseDdlResponse", - "shortName": "get_database_ddl" - }, - "description": "Sample for GetDatabaseDdl", - "file": "spanner_v1_generated_database_admin_get_database_ddl_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_ddl_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "spanner_v1_generated_database_admin_get_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "spanner_v1_generated_database_admin_get_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetDatabase_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_database_admin_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_database_admin_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.internal_update_graph_operation", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "InternalUpdateGraphOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "operation_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", - "shortName": "internal_update_graph_operation" - }, - "description": "Sample for InternalUpdateGraphOperation", - "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.internal_update_graph_operation", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.InternalUpdateGraphOperation", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "InternalUpdateGraphOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "operation_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.InternalUpdateGraphOperationResponse", - "shortName": "internal_update_graph_operation" - }, - "description": "Sample for InternalUpdateGraphOperation", - "file": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_operations", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupOperations" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsAsyncPager", - "shortName": "list_backup_operations" - }, - "description": "Sample for ListBackupOperations", - "file": "spanner_v1_generated_database_admin_list_backup_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_operations", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupOperationsPager", - "shortName": "list_backup_operations" - }, - "description": "Sample for ListBackupOperations", - "file": "spanner_v1_generated_database_admin_list_backup_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backup_schedules", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", - "service": { - "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupSchedules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesAsyncPager", - "shortName": "list_backup_schedules" - }, - "description": "Sample for ListBackupSchedules", - "file": "spanner_v1_generated_database_admin_list_backup_schedules_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_schedules_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backup_schedules", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackupSchedules", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackupSchedules" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupSchedulesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupSchedulesPager", - "shortName": "list_backup_schedules" - }, - "description": "Sample for ListBackupSchedules", - "file": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backup_schedules_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_backups", - 
"method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsAsyncPager", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": "spanner_v1_generated_database_admin_list_backups_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_backups", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListBackups", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListBackupsPager", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": "spanner_v1_generated_database_admin_list_backups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListBackups_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_backups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_operations", - "method": { - "fullName": 
"google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsAsyncPager", - "shortName": "list_database_operations" - }, - "description": "Sample for ListDatabaseOperations", - "file": "spanner_v1_generated_database_admin_list_database_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_operations", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseOperations", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseOperationsPager", - "shortName": "list_database_operations" - }, - "description": "Sample for ListDatabaseOperations", - "file": "spanner_v1_generated_database_admin_list_database_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_database_roles", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseRoles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesAsyncPager", - "shortName": "list_database_roles" - }, - "description": "Sample for ListDatabaseRoles", - "file": "spanner_v1_generated_database_admin_list_database_roles_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_roles_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_database_roles", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabaseRoles", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabaseRoles" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabaseRolesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabaseRolesPager", - "shortName": "list_database_roles" - }, - "description": "Sample for ListDatabaseRoles", - "file": "spanner_v1_generated_database_admin_list_database_roles_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_database_roles_sync.py" - }, - { 
- "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.list_databases", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesAsyncPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "spanner_v1_generated_database_admin_list_databases_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_databases_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.list_databases", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.services.database_admin.pagers.ListDatabasesPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "spanner_v1_generated_database_admin_list_databases_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_ListDatabases_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_list_databases_sync.py" - }, - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.restore_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "RestoreDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "backup", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restore_database" - }, - "description": "Sample for RestoreDatabase", - "file": "spanner_v1_generated_database_admin_restore_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_restore_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.restore_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.RestoreDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "RestoreDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.RestoreDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "backup", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restore_database" - }, - "description": "Sample for RestoreDatabase", - "file": "spanner_v1_generated_database_admin_restore_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, 
- "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_restore_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_database_admin_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_database_admin_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_set_iam_policy_sync.py" - }, - { - "canonical": true, - 
"clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_database_admin_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "update_backup_schedule" - }, - "description": "Sample for UpdateBackupSchedule", - "file": "spanner_v1_generated_database_admin_update_backup_schedule_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_schedule_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup_schedule", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackupSchedule", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackupSchedule" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupScheduleRequest" - }, - { - "name": "backup_schedule", - "type": "google.cloud.spanner_admin_database_v1.types.BackupSchedule" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.BackupSchedule", - "shortName": "update_backup_schedule" - }, - "description": "Sample for UpdateBackupSchedule", - "file": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync", - 
"segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_schedule_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "update_backup" - }, - "description": "Sample for UpdateBackup", - "file": "spanner_v1_generated_database_admin_update_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_backup", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateBackup", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateBackupRequest" - }, - { - "name": "backup", - "type": "google.cloud.spanner_admin_database_v1.types.Backup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_database_v1.types.Backup", - "shortName": "update_backup" - }, - "description": "Sample for UpdateBackup", - "file": 
"spanner_v1_generated_database_admin_update_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "statements", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_database_ddl" - }, - "description": "Sample for UpdateDatabaseDdl", - "file": "spanner_v1_generated_database_admin_update_database_ddl_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_ddl_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database_ddl", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabaseDdl" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseDdlRequest" - }, - { - "name": "database", - "type": "str" - }, - { - "name": "statements", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - 
"shortName": "update_database_ddl" - }, - "description": "Sample for UpdateDatabaseDdl", - "file": "spanner_v1_generated_database_admin_update_database_ddl_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_ddl_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient", - "shortName": "DatabaseAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminAsyncClient.update_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.spanner_admin_database_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "spanner_v1_generated_database_admin_update_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient", - "shortName": "DatabaseAdminClient" - }, - "fullName": "google.cloud.spanner_admin_database_v1.DatabaseAdminClient.update_database", - "method": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase", - "service": { - "fullName": "google.spanner.admin.database.v1.DatabaseAdmin", - "shortName": "DatabaseAdmin" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_database_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.spanner_admin_database_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "spanner_v1_generated_database_admin_update_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_database_admin_update_database_sync.py" - } - ] -} diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py deleted file mode 100644 index ff6fcfe598..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AddSplitPoints -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_add_split_points():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.AddSplitPointsRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = await client.add_split_points(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py
deleted file mode 100644
index 3819bbe986..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_add_split_points_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AddSplitPoints
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_add_split_points():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.AddSplitPointsRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = client.add_split_points(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_AddSplitPoints_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py
deleted file mode 100644
index d885947bb5..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_async.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CopyBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_copy_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CopyBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-        source_backup="source_backup_value",
-    )
-
-    # Make the request
-    operation = client.copy_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py
deleted file mode 100644
index a571e058c9..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_copy_backup_sync.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CopyBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CopyBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_copy_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CopyBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-        source_backup="source_backup_value",
-    )
-
-    # Make the request
-    operation = client.copy_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CopyBackup_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py
deleted file mode 100644
index 2ad8881f54..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_create_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-    )
-
-    # Make the request
-    operation = client.create_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py
deleted file mode 100644
index efdcc2457e..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_create_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupScheduleRequest(
-        parent="parent_value",
-        backup_schedule_id="backup_schedule_id_value",
-    )
-
-    # Make the request
-    response = await client.create_backup_schedule(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py
deleted file mode 100644
index 60d4b50c3b..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_schedule_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_create_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupScheduleRequest(
-        parent="parent_value",
-        backup_schedule_id="backup_schedule_id_value",
-    )
-
-    # Make the request
-    response = client.create_backup_schedule(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackupSchedule_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py
deleted file mode 100644
index 02b9d1f0e7..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_backup_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_create_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateBackupRequest(
-        parent="parent_value",
-        backup_id="backup_id_value",
-    )
-
-    # Make the request
-    operation = client.create_backup(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateBackup_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py
deleted file mode 100644
index 47399a8d40..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_create_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateDatabaseRequest(
-        parent="parent_value",
-        create_statement="create_statement_value",
-    )
-
-    # Make the request
-    operation = client.create_database(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py
deleted file mode 100644
index 6f112cd8a7..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_create_database_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_create_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.CreateDatabaseRequest(
-        parent="parent_value",
-        create_statement="create_statement_value",
-    )
-
-    # Make the request
-    operation = client.create_database(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_CreateDatabase_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py
deleted file mode 100644
index ab10785105..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_delete_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_backup(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py
deleted file mode 100644
index 591d45cb10..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_delete_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_backup_schedule(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py
deleted file mode 100644
index 720417ba65..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_schedule_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_delete_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupScheduleRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_backup_schedule(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackupSchedule_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py
deleted file mode 100644
index 736dc56a23..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_delete_backup_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_delete_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DeleteBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_backup(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DeleteBackup_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py
deleted file mode 100644
index 15f279b72d..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DropDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_drop_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DropDatabaseRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    await client.drop_database(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py
deleted file mode 100644
index f218cabd83..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_drop_database_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DropDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_DropDatabase_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_drop_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.DropDatabaseRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    client.drop_database(request=request)
-
-
-# [END spanner_v1_generated_DatabaseAdmin_DropDatabase_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py
deleted file mode 100644
index 58b93a119a..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_get_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_backup(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetBackup_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py
deleted file mode 100644
index 5a37eec975..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_get_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetBackupScheduleRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_backup_schedule(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py
deleted file mode 100644
index 4006cac333..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_schedule_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupSchedule
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_get_backup_schedule():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetBackupScheduleRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_backup_schedule(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetBackupSchedule_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py
deleted file mode 100644
index 16cffcd78d..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_backup_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_get_backup():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_backup(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetBackup_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py
deleted file mode 100644
index fd8621c27b..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_get_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetDatabaseRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_database(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py
deleted file mode 100644
index 8e84b21f78..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDatabaseDdl
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-async def sample_get_database_ddl():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminAsyncClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetDatabaseDdlRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = await client.get_database_ddl(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_async]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py
deleted file mode 100644
index 495b557a55..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_ddl_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDatabaseDdl
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_get_database_ddl():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetDatabaseDdlRequest(
-        database="database_value",
-    )
-
-    # Make the request
-    response = client.get_database_ddl(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetDatabaseDdl_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py
deleted file mode 100644
index ab729bb9e3..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_database_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDatabase
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetDatabase_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import spanner_admin_database_v1
-
-
-def sample_get_database():
-    # Create a client
-    client = spanner_admin_database_v1.DatabaseAdminClient()
-
-    # Initialize request argument(s)
-    request = spanner_admin_database_v1.GetDatabaseRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_database(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END spanner_v1_generated_DatabaseAdmin_GetDatabase_sync]
diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py
deleted file mode 100644
index d5d75de78b..0000000000
--- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetIamPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-spanner-admin-database
-
-
-# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py deleted file mode 100644 index 75e0b48b1b..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py deleted file mode 100644 index 556205a0aa..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InternalUpdateGraphOperation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = await client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py deleted file mode 100644 index 46f1a3c88f..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_internal_update_graph_operation_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for InternalUpdateGraphOperation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_internal_update_graph_operation(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.InternalUpdateGraphOperationRequest( - database="database_value", - operation_id="operation_id_value", - vm_identity_token="vm_identity_token_value", - ) - - # Make the request - response = client.internal_update_graph_operation(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_InternalUpdateGraphOperation_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py deleted file mode 100644 index a56ec9f80e..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_backup_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_backup_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py deleted file mode 100644 index 6383e1b247..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_operations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_backup_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py deleted file mode 100644 index 25ac53891a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupSchedules -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_backup_schedules(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_backup_schedules(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py deleted file mode 100644 index 89cf82d278..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backup_schedules_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackupSchedules -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_backup_schedules(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupSchedulesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_schedules(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackupSchedules_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py deleted file mode 100644 index 140e519e07..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackups_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_backups(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackups_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py deleted file mode 100644 index 9f04036f74..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_backups_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListBackups_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_backups(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListBackups_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py deleted file mode 100644 index 3bc614b232..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_database_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_database_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py deleted file mode 100644 index 3d4dc965a9..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_operations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_database_operations(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseOperations_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py deleted file mode 100644 index 46ec91ce89..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseRoles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_database_roles(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseRolesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_database_roles(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py deleted file mode 100644 index d39e4759dd..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_database_roles_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabaseRoles -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_database_roles(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabaseRolesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_database_roles(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabaseRoles_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py deleted file mode 100644 index 586dfa56f1..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_list_databases(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_databases(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py deleted file mode 100644 index e6ef221af6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_list_databases_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_ListDatabases_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_list_databases(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_ListDatabases_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py deleted file mode 100644 index 384c063c61..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_restore_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.RestoreDatabaseRequest( - backup="backup_value", - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = await client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py deleted file mode 100644 index a327a8ae13..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_restore_database_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_restore_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.RestoreDatabaseRequest( - backup="backup_value", - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - operation = client.restore_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_RestoreDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py deleted file mode 100644 index edade4c950..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py deleted file mode 100644 index 28a6746f4a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_SetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py deleted file mode 100644 index 0e6ea91cb3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py deleted file mode 100644 index 3fd0316dc1..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_TestIamPermissions_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py deleted file mode 100644 index 95fa2a63f6..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupRequest( - ) - - # Make the request - response = await client.update_backup(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py deleted file mode 100644 index de17dfc86e..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = await client.update_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py deleted file mode 100644 index 4ef64a0673..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_schedule_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackupSchedule -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_backup_schedule(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupScheduleRequest( - ) - - # Make the request - response = client.update_backup_schedule(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackupSchedule_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py deleted file mode 100644 index 9dbb0148dc..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_backup_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_backup(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateBackupRequest( - ) - - # Make the request - response = client.update_backup(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateBackup_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py deleted file mode 100644 index d5588c3036..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - database = spanner_admin_database_v1.Database() - database.name = "name_value" - - request = spanner_admin_database_v1.UpdateDatabaseRequest( - database=database, - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py deleted file mode 100644 index ad98e2da9c..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabaseDdl -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -async def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_async] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py deleted file mode 100644 index 73297524b9..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_ddl_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabaseDdl -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_database_ddl(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - request = spanner_admin_database_v1.UpdateDatabaseDdlRequest( - database="database_value", - statements=['statements_value1', 'statements_value2'], - ) - - # Make the request - operation = client.update_database_ddl(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabaseDdl_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py b/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py deleted file mode 100644 index 62ed40bc84..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/samples/generated_samples/spanner_v1_generated_database_admin_update_database_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-database - - -# [START spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_database_v1 - - -def sample_update_database(): - # Create a client - client = spanner_admin_database_v1.DatabaseAdminClient() - - # Initialize request argument(s) - database = spanner_admin_database_v1.Database() - database.name = "name_value" - - request = spanner_admin_database_v1.UpdateDatabaseRequest( - database=database, - ) - - # Make the request - operation = client.update_database(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_DatabaseAdmin_UpdateDatabase_sync] diff --git a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py b/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py deleted file mode 100644 index d642e9a0e3..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/scripts/fixup_spanner_admin_database_v1_keywords.py +++ /dev/null @@ -1,202 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
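[Editorial aside, not part of the patch: the UpdateDatabase and UpdateDatabaseDdl samples deleted above all return long-running operations, and the only difference between the sync and async variants is how that operation is awaited. Condensed side by side, with names exactly as in the samples:

    # Sync client: result() blocks until the operation completes.
    operation = client.update_database_ddl(request=request)
    response = operation.result()

    # Async client (as generated): await the RPC first, then poll the operation.
    operation = client.update_database_ddl(request=request)
    response = (await operation).result()
]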
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spanner_admin_databaseCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'add_split_points': ('database', 'split_points', 'initiator', ), - 'copy_backup': ('parent', 'backup_id', 'source_backup', 'expire_time', 'encryption_config', ), - 'create_backup': ('parent', 'backup_id', 'backup', 'encryption_config', ), - 'create_backup_schedule': ('parent', 'backup_schedule_id', 'backup_schedule', ), - 'create_database': ('parent', 'create_statement', 'extra_statements', 'encryption_config', 'database_dialect', 'proto_descriptors', ), - 'delete_backup': ('name', ), - 'delete_backup_schedule': ('name', ), - 'drop_database': ('database', ), - 'get_backup': ('name', ), - 'get_backup_schedule': ('name', ), - 'get_database': ('name', ), - 'get_database_ddl': ('database', ), - 'get_iam_policy': ('resource', 'options', ), - 'internal_update_graph_operation': ('database', 'operation_id', 'vm_identity_token', 'progress', 'status', ), - 'list_backup_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_backups': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_backup_schedules': ('parent', 'page_size', 'page_token', ), - 'list_database_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_database_roles': ('parent', 'page_size', 'page_token', ), - 'list_databases': ('parent', 'page_size', 'page_token', ), - 'restore_database': ('parent', 'database_id', 'backup', 'encryption_config', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_backup': ('backup', 'update_mask', ), - 'update_backup_schedule': ('backup_schedule', 'update_mask', ), - 'update_database': ('database', 'update_mask', ), - 'update_database_ddl': ('database', 'statements', 'operation_id', 'proto_descriptors', 'throughput_mode', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=spanner_admin_databaseCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the spanner_admin_database client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_database/v1/setup.py b/owl-bot-staging/spanner_admin_database/v1/setup.py deleted file mode 100644 index 64017e80e7..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/setup.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-spanner-admin-database' - - -description = "Google Cloud Spanner Admin Database API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/spanner_admin_database/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "grpcio >= 1.33.2, < 2.0.0", - "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-database" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt deleted file mode 100644 index 2ae5a677e8..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,13 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt deleted file mode 100644 index 2ae5a677e8..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.14.txt +++ /dev/null @@ -1,13 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt deleted file mode 100644 index 5b1ee6c35a..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,13 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py deleted file mode 100644 index 0ba71c42e0..0000000000 --- a/owl-bot-staging/spanner_admin_database/v1/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py +++ /dev/null @@ -1,22226 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import re -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminAsyncClient -from google.cloud.spanner_admin_database_v1.services.database_admin import DatabaseAdminClient -from google.cloud.spanner_admin_database_v1.services.database_admin import pagers -from google.cloud.spanner_admin_database_v1.services.database_admin import transports -from google.cloud.spanner_admin_database_v1.types import backup -from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup -from google.cloud.spanner_admin_database_v1.types import backup_schedule -from google.cloud.spanner_admin_database_v1.types import backup_schedule as gsad_backup_schedule -from google.cloud.spanner_admin_database_v1.types import common -from google.cloud.spanner_admin_database_v1.types import spanner_database_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return 
b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DatabaseAdminClient._get_default_mtls_endpoint(None) is None - assert DatabaseAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DatabaseAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DatabaseAdminClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DatabaseAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DatabaseAdminClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be 
`never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DatabaseAdminClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DatabaseAdminClient._get_client_cert_source(None, False) is None - assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DatabaseAdminClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DatabaseAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) -@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE - default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DatabaseAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - assert DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DatabaseAdminClient.DEFAULT_MTLS_ENDPOINT - assert DatabaseAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DatabaseAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DatabaseAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DatabaseAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DatabaseAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DatabaseAdminClient._get_universe_domain(None, None) == DatabaseAdminClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DatabaseAdminClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
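[Editorial aside, not part of the patch: the environment-variable tests in this file rely on mock.patch.dict restoring os.environ when the context manager exits. A self-contained sketch of that pattern, assuming the variable is unset beforehand:

    import os
    from unittest import mock

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        # The override is visible only inside the context manager.
        assert os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] == "never"
    # On exit the prior state is restored (here: the key is removed again).
    assert "GOOGLE_API_USE_MTLS_ENDPOINT" not in os.environ
]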
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = DatabaseAdminClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = DatabaseAdminClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (DatabaseAdminClient, "grpc"), - (DatabaseAdminAsyncClient, "grpc_asyncio"), - (DatabaseAdminClient, "rest"), -]) -def test_database_admin_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DatabaseAdminGrpcTransport, "grpc"), - (transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DatabaseAdminRestTransport, "rest"), -]) -def test_database_admin_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DatabaseAdminClient, "grpc"), - (DatabaseAdminAsyncClient, "grpc_asyncio"), - (DatabaseAdminClient, "rest"), -]) -def test_database_admin_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert 
client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://spanner.googleapis.com' - ) - - -def test_database_admin_client_get_transport_class(): - transport = DatabaseAdminClient.get_transport_class() - available_transports = [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminRestTransport, - ] - assert transport in available_transports - - transport = DatabaseAdminClient.get_transport_class("grpc") - assert transport == transports.DatabaseAdminGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"), -]) -@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) -@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) -def test_database_admin_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DatabaseAdminClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "true"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", "false"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "true"), - (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", "false"), -]) -@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) -@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) 
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_database_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DatabaseAdminClient, DatabaseAdminAsyncClient -]) -@mock.patch.object(DatabaseAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminClient)) -@mock.patch.object(DatabaseAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatabaseAdminAsyncClient)) -def test_database_admin_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
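-    # (Aside: the same resolution is available to user code; a minimal sketch,
-    # assuming a `my_cert_source` callback that returns (cert_bytes, key_bytes):
-    #
-    #     options = client_options.ClientOptions(client_cert_source=my_cert_source)
-    #     endpoint, cert_source = DatabaseAdminClient.get_mtls_endpoint_and_cert_source(options)
-    #
-    # With GOOGLE_API_USE_CLIENT_CERTIFICATE="true" this yields the mTLS
-    # endpoint together with the callback; otherwise the plain endpoint and None.)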
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DatabaseAdminClient, DatabaseAdminAsyncClient -]) -@mock.patch.object(DatabaseAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminClient)) -@mock.patch.object(DatabaseAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DatabaseAdminAsyncClient)) -def test_database_admin_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DatabaseAdminClient._DEFAULT_UNIVERSE - default_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DatabaseAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc"), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"), - (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest"), -]) -def test_database_admin_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DatabaseAdminClient, transports.DatabaseAdminRestTransport, "rest", None), -]) -def test_database_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_database_admin_client_client_options_from_dict(): - with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DatabaseAdminClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport, "grpc", grpc_helpers), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_database_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=None, - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabasesRequest, - dict, -]) -def test_list_databases(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabasesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_databases_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.ListDatabasesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
-        client.list_databases(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner_database_admin.ListDatabasesRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-        )
-
-def test_list_databases_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DatabaseAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_databases in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc
-        request = {}
-        client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_databases(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_databases_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_databases in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_databases] = mock_rpc
-
-        request = {}
-        await client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_databases(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabasesRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_databases(request)
-
-        # Establish that the underlying gRPC stub method was called.
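-        # (Aside: FakeUnaryUnaryCall is the awaitable stand-in used throughout
-        # these async tests; awaiting it yields the wrapped message, so the
-        # assertions below can treat `response` like a real server reply. A
-        # minimal sketch of the pattern, with `resp` standing for any response
-        # message:
-        #
-        #     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resp)
-        #     response = await client.list_databases(request)
-        # )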
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) - -def test_list_databases_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabasesResponse() - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_databases_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse()) - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_databases_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabasesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_databases_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
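-    # (Aside: the two calling conventions are mutually exclusive; a minimal
-    # sketch of each, assuming `parent` holds an instance resource name:
-    #
-    #     client.list_databases(parent=parent)  # flattened arguments
-    #     client.list_databases(request=spanner_database_admin.ListDatabasesRequest(parent=parent))
-    #
-    # Mixing them, as the call below does, raises ValueError.)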
-    with pytest.raises(ValueError):
-        client.list_databases(
-            spanner_database_admin.ListDatabasesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_databases_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_databases(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_databases_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_databases(
-            spanner_database_admin.ListDatabasesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_databases_pager(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabasesResponse(
-                databases=[
-                    spanner_database_admin.Database(),
-                    spanner_database_admin.Database(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_databases(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner_database_admin.Database)
-                   for i in results)
-
-
-def test_list_databases_pages(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_databases),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - RuntimeError, - ) - pages = list(client.list_databases(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_databases_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_databases(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_database_admin.Database) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_databases_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
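-        # (Aside: each element of the side_effect below is one page; iteration
-        # stops at the page whose next_page_token is empty, and the trailing
-        # RuntimeError guards against the pager requesting one page too many.
-        # A minimal sketch of consuming pages outside of tests:
-        #
-        #     async for page in (await client.list_databases(request={})).pages:
-        #         token = page.raw_page.next_page_token
-        # )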
- call.side_effect = ( - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabasesResponse( - databases=[ - spanner_database_admin.Database(), - spanner_database_admin.Database(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_databases(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.CreateDatabaseRequest, - dict, -]) -def test_create_database(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.CreateDatabaseRequest( - parent='parent_value', - create_statement='create_statement_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.CreateDatabaseRequest( - parent='parent_value', - create_statement='create_statement_value', - ) - -def test_create_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_database] = mock_rpc - - request = {} - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.CreateDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_database_async_from_dict(): - await test_create_database_async(request_type=dict) - -def test_create_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_database( - parent='parent_value', - create_statement='create_statement_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
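-    # (Aside: create_database is a long-running operation; the mocked
-    # operations_pb2.Operation above is what the transport returns, and the
-    # client wraps it in a future. A minimal sketch of real usage, assuming
-    # the caller is willing to block on completion:
-    #
-    #     operation = client.create_database(
-    #         parent=parent,
-    #         create_statement="CREATE DATABASE `my-db`",
-    #     )
-    #     database = operation.result()
-    # )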
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].create_statement - mock_val = 'create_statement_value' - assert arg == mock_val - - -def test_create_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_database( - spanner_database_admin.CreateDatabaseRequest(), - parent='parent_value', - create_statement='create_statement_value', - ) - -@pytest.mark.asyncio -async def test_create_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_database( - parent='parent_value', - create_statement='create_statement_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].create_statement - mock_val = 'create_statement_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_database( - spanner_database_admin.CreateDatabaseRequest(), - parent='parent_value', - create_statement='create_statement_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.GetDatabaseRequest, - dict, -]) -def test_get_database(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - enable_drop_protection=True, - reconciling=True, - ) - response = client.get_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.enable_drop_protection is True - assert response.reconciling is True - - -def test_get_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.GetDatabaseRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseRequest( - name='name_value', - ) - -def test_get_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc - request = {} - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_database in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_database] = mock_rpc
-
-        request = {}
-        await client.get_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database(
-            name='name_value',
-            state=spanner_database_admin.Database.State.CREATING,
-            version_retention_period='version_retention_period_value',
-            default_leader='default_leader_value',
-            database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
-            enable_drop_protection=True,
-            reconciling=True,
-        ))
-        response = await client.get_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.GetDatabaseRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
- assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.enable_drop_protection is True - assert response.reconciling is True - - -@pytest.mark.asyncio -async def test_get_database_async_from_dict(): - await test_get_database_async(request_type=dict) - -def test_get_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = spanner_database_admin.Database() - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_database_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_database_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.UpdateDatabaseRequest, - dict, -]) -def test_update_database(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.UpdateDatabaseRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.UpdateDatabaseRequest( - ) - -def test_update_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_database] = mock_rpc - - request = {} - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_database_async_from_dict(): - await test_update_database_async(request_type=dict) - -def test_update_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -def test_update_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database( - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = spanner_database_admin.Database(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_database_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_database(
-            spanner_database_admin.UpdateDatabaseRequest(),
-            database=spanner_database_admin.Database(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_database_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_database(
-            database=spanner_database_admin.Database(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = spanner_database_admin.Database(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_database_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_database(
-            spanner_database_admin.UpdateDatabaseRequest(),
-            database=spanner_database_admin.Database(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  spanner_database_admin.UpdateDatabaseDdlRequest,
-  dict,
-])
-def test_update_database_ddl(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_database_ddl),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.update_database_ddl(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseDdlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_database_ddl_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.UpdateDatabaseDdlRequest( - database='database_value', - operation_id='operation_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_database_ddl(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.UpdateDatabaseDdlRequest( - database='database_value', - operation_id='operation_id_value', - ) - -def test_update_database_ddl_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database_ddl in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc - request = {} - client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_database_ddl in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_database_ddl] = mock_rpc - - request = {} - await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.UpdateDatabaseDdlRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.UpdateDatabaseDdlRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_database_ddl_async_from_dict(): - await test_update_database_ddl_async(request_type=dict) - -def test_update_database_ddl_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseDdlRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
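-    # (Patching type(...).__call__ replaces the call operator of the gRPC
-    # multicallable itself, so the transport-level invocation is intercepted
-    # without opening a real channel.)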
- with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_ddl_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.UpdateDatabaseDdlRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_update_database_ddl_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database_ddl( - database='database_value', - statements=['statements_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].statements - mock_val = ['statements_value'] - assert arg == mock_val - - -def test_update_database_ddl_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database='database_value', - statements=['statements_value'], - ) - -@pytest.mark.asyncio -async def test_update_database_ddl_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_database_ddl(
-            database='database_value',
-            statements=['statements_value'],
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-        arg = args[0].statements
-        mock_val = ['statements_value']
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_database_ddl_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_database_ddl(
-            spanner_database_admin.UpdateDatabaseDdlRequest(),
-            database='database_value',
-            statements=['statements_value'],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  spanner_database_admin.DropDatabaseRequest,
-  dict,
-])
-def test_drop_database(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.drop_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.drop_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.DropDatabaseRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_drop_database_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_database_admin.DropDatabaseRequest(
-        database='database_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.drop_database),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.drop_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.DropDatabaseRequest( - database='database_value', - ) - -def test_drop_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.drop_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc - request = {} - client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.drop_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_drop_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.drop_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.drop_database] = mock_rpc - - request = {} - await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.drop_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_drop_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.DropDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.DropDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_drop_database_async_from_dict(): - await test_drop_database_async(request_type=dict) - -def test_drop_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.DropDatabaseRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = None - client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_drop_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.DropDatabaseRequest() - - request.database = 'database_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database=database_value', - ) in kw['metadata'] - - -def test_drop_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.drop_database( - database='database_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - - -def test_drop_database_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
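-    # Callers must pick one style: drop_database(request) or
-    # drop_database(database=...); passing both is expected to raise
-    # ValueError before any RPC is attempted.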
-    with pytest.raises(ValueError):
-        client.drop_database(
-            spanner_database_admin.DropDatabaseRequest(),
-            database='database_value',
-        )
-
-@pytest.mark.asyncio
-async def test_drop_database_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.drop_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.drop_database(
-            database='database_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_drop_database_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.drop_database(
-            spanner_database_admin.DropDatabaseRequest(),
-            database='database_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  spanner_database_admin.GetDatabaseDdlRequest,
-  dict,
-])
-def test_get_database_ddl(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database_ddl),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.GetDatabaseDdlResponse(
-            statements=['statements_value'],
-            proto_descriptors=b'proto_descriptors_blob',
-        )
-        response = client.get_database_ddl(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.GetDatabaseDdlRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse)
-    assert response.statements == ['statements_value']
-    assert response.proto_descriptors == b'proto_descriptors_blob'
-
-
-def test_get_database_ddl_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_database_admin.GetDatabaseDdlRequest(
-        database='database_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_database_ddl(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.GetDatabaseDdlRequest( - database='database_value', - ) - -def test_get_database_ddl_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database_ddl in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc - request = {} - client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_database_ddl_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_database_ddl in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_database_ddl] = mock_rpc - - request = {} - await client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_database_ddl_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.GetDatabaseDdlRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse(
-            statements=['statements_value'],
-            proto_descriptors=b'proto_descriptors_blob',
-        ))
-        response = await client.get_database_ddl(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.GetDatabaseDdlRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse)
-    assert response.statements == ['statements_value']
-    assert response.proto_descriptors == b'proto_descriptors_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_database_ddl_async_from_dict():
-    await test_get_database_ddl_async(request_type=dict)
-
-def test_get_database_ddl_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.GetDatabaseDdlRequest()
-
-    request.database = 'database_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database_ddl),
-            '__call__') as call:
-        call.return_value = spanner_database_admin.GetDatabaseDdlResponse()
-        client.get_database_ddl(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'database=database_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_database_ddl_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.GetDatabaseDdlRequest()
-
-    request.database = 'database_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database_ddl),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse())
-        await client.get_database_ddl(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'database=database_value',
-    ) in kw['metadata']
-
-
-def test_get_database_ddl_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database_ddl),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.GetDatabaseDdlResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_database_ddl(
-            database='database_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-
-
-def test_get_database_ddl_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_database_ddl(
-            spanner_database_admin.GetDatabaseDdlRequest(),
-            database='database_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_database_ddl_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database_ddl),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_database_ddl(
-            database='database_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = 'database_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_database_ddl_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_database_ddl(
-            spanner_database_admin.GetDatabaseDdlRequest(),
-            database='database_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  iam_policy_pb2.SetIamPolicyRequest,
-  dict,
-])
-def test_set_iam_policy(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        )
-        response = client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.SetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-def test_set_iam_policy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
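-    # (SetIamPolicyRequest declares no UUID4-annotated field, so the string
-    # fields set explicitly below are expected to round-trip unchanged.)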
- client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.set_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - -def test_set_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc - - request = {} - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.set_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        ))
-        response = await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.SetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_from_dict():
-    await test_set_iam_policy_async(request_type=dict)
-
-def test_set_iam_policy_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        call.return_value = policy_pb2.Policy()
-        client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
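-    # (The transport serializes routing fields into a single
-    # 'x-goog-request-params' metadata entry, e.g.
-    # ('x-goog-request-params', 'resource=resource_value'), which is what
-    # the membership check below verifies.)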
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-def test_set_iam_policy_from_dict_foreign():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-        response = client.set_iam_policy(request={
-            'resource': 'resource_value',
-            'policy': policy_pb2.Policy(version=774),
-            'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']),
-            }
-        )
-        call.assert_called()
-
-
-def test_set_iam_policy_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.set_iam_policy(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-
-def test_set_iam_policy_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.set_iam_policy(
-            iam_policy_pb2.SetIamPolicyRequest(),
-            resource='resource_value',
-        )
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.set_iam_policy(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
-
-        request = {}
-        await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        ))
-        response = await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.GetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
-    await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        call.return_value = policy_pb2.Policy()
-        client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-def test_get_iam_policy_from_dict_foreign():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-        response = client.get_iam_policy(request={
-            'resource': 'resource_value',
-            'options': options_pb2.GetPolicyOptions(requested_policy_version=2598),
-            }
-        )
-        call.assert_called()
-
-
-def test_get_iam_policy_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_iam_policy(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-
-def test_get_iam_policy_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_iam_policy(
-            iam_policy_pb2.GetIamPolicyRequest(),
-            resource='resource_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
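-        # (The flattened keyword argument is expected to be packed into
-        # iam_policy_pb2.GetIamPolicyRequest(resource='resource_value')
-        # before the transport is invoked; the assertions below inspect the
-        # request object the stub actually received.)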
- response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc - - request = {} - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. 
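- # (grpc_helpers_async.FakeUnaryUnaryCall wraps the response so that
- # the mocked stub call can be awaited like a real async unary-unary
- # call.)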
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- ))
- response = await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
- await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
- await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-def test_test_iam_permissions_from_dict_foreign():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- response = client.test_iam_permissions(request={
- 'resource': 'resource_value',
- 'permissions': ['permissions_value'],
- }
- )
- call.assert_called()
-
-
-def test_test_iam_permissions_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.test_iam_permissions(
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
- arg = args[0].permissions
- mock_val = ['permissions_value']
- assert arg == mock_val
-
-
-def test_test_iam_permissions_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.test_iam_permissions(
- iam_policy_pb2.TestIamPermissionsRequest(),
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.test_iam_permissions(
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
- arg = args[0].permissions
- mock_val = ['permissions_value']
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.test_iam_permissions(
- iam_policy_pb2.TestIamPermissionsRequest(),
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
-
-@pytest.mark.parametrize("request_type", [
- gsad_backup.CreateBackupRequest,
- dict,
-])
-def test_create_backup(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
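- # (create_backup is a long-running operation: the stub hands back a
- # raw operations_pb2.Operation, which the client wraps in a future,
- # as the isinstance assertion below expects.)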
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gsad_backup.CreateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup.CreateBackupRequest( - parent='parent_value', - backup_id='backup_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.CreateBackupRequest( - parent='parent_value', - backup_id='backup_id_value', - ) - -def test_create_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc - request = {} - client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
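- # (That first call can invoke wrap_method again for the operations
- # client, which is why the counter is reset before the second rpc.)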
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_backup] = mock_rpc - - request = {} - await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.CreateBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gsad_backup.CreateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_backup_async_from_dict(): - await test_create_backup_async(request_type=dict) - -def test_create_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.CreateBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.CreateBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_backup( - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].backup - mock_val = gsad_backup.Backup(database='database_value') - assert arg == mock_val - arg = args[0].backup_id - mock_val = 'backup_id_value' - assert arg == mock_val - - -def test_create_backup_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_backup( - gsad_backup.CreateBackupRequest(), - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - -@pytest.mark.asyncio -async def test_create_backup_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_backup(
- parent='parent_value',
- backup=gsad_backup.Backup(database='database_value'),
- backup_id='backup_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup
- mock_val = gsad_backup.Backup(database='database_value')
- assert arg == mock_val
- arg = args[0].backup_id
- mock_val = 'backup_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_backup_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_backup(
- gsad_backup.CreateBackupRequest(),
- parent='parent_value',
- backup=gsad_backup.Backup(database='database_value'),
- backup_id='backup_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backup.CopyBackupRequest,
- dict,
-])
-def test_copy_backup(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.copy_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.copy_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = backup.CopyBackupRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_copy_backup_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = backup.CopyBackupRequest(
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.copy_backup),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.copy_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.CopyBackupRequest( - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - ) - -def test_copy_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.copy_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc - request = {} - client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.copy_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_copy_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.copy_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.copy_backup] = mock_rpc - - request = {} - await client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.copy_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_copy_backup_async(transport: str = 'grpc_asyncio', request_type=backup.CopyBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backup.CopyBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_copy_backup_async_from_dict(): - await test_copy_backup_async(request_type=dict) - -def test_copy_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.CopyBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_copy_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.CopyBackupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_copy_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.copy_backup( - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - expire_time=timestamp_pb2.Timestamp(seconds=751), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
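- # (expire_time is marshalled between Python datetime and protobuf
- # Timestamp, so the assertion converts it back via
- # TimestampRule().to_proto before comparing.)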
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_id
- mock_val = 'backup_id_value'
- assert arg == mock_val
- arg = args[0].source_backup
- mock_val = 'source_backup_value'
- assert arg == mock_val
- assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
-
-
-def test_copy_backup_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.copy_backup(
- backup.CopyBackupRequest(),
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- expire_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
-@pytest.mark.asyncio
-async def test_copy_backup_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.copy_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.copy_backup(
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- expire_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_id
- mock_val = 'backup_id_value'
- assert arg == mock_val
- arg = args[0].source_backup
- mock_val = 'source_backup_value'
- assert arg == mock_val
- assert TimestampRule().to_proto(args[0].expire_time) == timestamp_pb2.Timestamp(seconds=751)
-
-@pytest.mark.asyncio
-async def test_copy_backup_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.copy_backup(
- backup.CopyBackupRequest(),
- parent='parent_value',
- backup_id='backup_id_value',
- source_backup='source_backup_value',
- expire_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backup.GetBackupRequest,
- dict,
-])
-def test_get_backup(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -def test_get_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.GetBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.GetBackupRequest( - name='name_value', - ) - -def test_get_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_backup in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_backup] = mock_rpc
-
- request = {}
- await client.get_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=backup.GetBackupRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup(
- database='database_value',
- name='name_value',
- size_bytes=1089,
- freeable_size_bytes=2006,
- exclusive_size_bytes=2168,
- state=backup.Backup.State.CREATING,
- referencing_databases=['referencing_databases_value'],
- database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
- referencing_backups=['referencing_backups_value'],
- backup_schedules=['backup_schedules_value'],
- incremental_backup_chain_id='incremental_backup_chain_id_value',
- ))
- response = await client.get_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backup.GetBackupRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) - -def test_get_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.GetBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backup.Backup() - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.GetBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup()) - await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_backup_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_backup(
- backup.GetBackupRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_backup_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_backup(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_backup_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_backup(
- backup.GetBackupRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- gsad_backup.UpdateBackupRequest,
- dict,
-])
-def test_update_backup(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gsad_backup.Backup(
- database='database_value',
- name='name_value',
- size_bytes=1089,
- freeable_size_bytes=2006,
- exclusive_size_bytes=2168,
- state=gsad_backup.Backup.State.CREATING,
- referencing_databases=['referencing_databases_value'],
- database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
- referencing_backups=['referencing_backups_value'],
- backup_schedules=['backup_schedules_value'],
- incremental_backup_chain_id='incremental_backup_chain_id_value',
- )
- response = client.update_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = gsad_backup.UpdateBackupRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -def test_update_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup.UpdateBackupRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup.UpdateBackupRequest( - ) - -def test_update_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc - request = {} - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.update_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.update_backup in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.update_backup] = mock_rpc
-
- request = {}
- await client.update_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.update_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_backup_async(transport: str = 'grpc_asyncio', request_type=gsad_backup.UpdateBackupRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup(
- database='database_value',
- name='name_value',
- size_bytes=1089,
- freeable_size_bytes=2006,
- exclusive_size_bytes=2168,
- state=gsad_backup.Backup.State.CREATING,
- referencing_databases=['referencing_databases_value'],
- database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL,
- referencing_backups=['referencing_backups_value'],
- backup_schedules=['backup_schedules_value'],
- incremental_backup_chain_id='incremental_backup_chain_id_value',
- ))
- response = await client.update_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = gsad_backup.UpdateBackupRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) - -def test_update_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.UpdateBackupRequest() - - request.backup.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = gsad_backup.Backup() - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gsad_backup.UpdateBackupRequest() - - request.backup.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup()) - await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=name_value', - ) in kw['metadata'] - - -def test_update_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_backup( - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].backup
-        mock_val = gsad_backup.Backup(database='database_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_backup_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_backup(
-            gsad_backup.UpdateBackupRequest(),
-            backup=gsad_backup.Backup(database='database_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_backup_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_backup),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_backup(
-            backup=gsad_backup.Backup(database='database_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].backup
-        mock_val = gsad_backup.Backup(database='database_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_backup_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_backup(
-            gsad_backup.UpdateBackupRequest(),
-            backup=gsad_backup.Backup(database='database_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  backup.DeleteBackupRequest,
-  dict,
-])
-def test_delete_backup(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_backup),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_backup(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = backup.DeleteBackupRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert response is None - - -def test_delete_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.DeleteBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.DeleteBackupRequest( - name='name_value', - ) - -def test_delete_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup] = mock_rpc - - request = {} - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=backup.DeleteBackupRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backup.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) - -def test_delete_backup_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = None - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_backup_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_backup(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_backup_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_backup(
-            backup.DeleteBackupRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_backup_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_backup),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_backup(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_backup_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_backup(
-            backup.DeleteBackupRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  backup.ListBackupsRequest,
-  dict,
-])
-def test_list_backups(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = backup.ListBackupsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_backups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = backup.ListBackupsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListBackupsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_backups_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.ListBackupsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_backups(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_backups_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc - request = {} - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_backups in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_backups] = mock_rpc - - request = {} - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.list_backups(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupsRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_backups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = backup.ListBackupsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListBackupsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_backups_async_from_dict():
-    await test_list_backups_async(request_type=dict)
-
-def test_list_backups_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backup.ListBackupsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backups),
-            '__call__') as call:
-        call.return_value = backup.ListBackupsResponse()
-        client.list_backups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_backups_field_headers_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backup.ListBackupsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backups),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse())
-        await client.list_backups(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_backups_flattened():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = backup.ListBackupsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_backups(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_backups_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_backups(
-            backup.ListBackupsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_backups_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backups),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_backups(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_backups_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_backups(
-            backup.ListBackupsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_backups_pager(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backups),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backups(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup.Backup) - for i in results) -def test_list_backups_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backups_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backup.Backup) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backups_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.RestoreDatabaseRequest, - dict, -]) -def test_restore_database(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_restore_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.RestoreDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.restore_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.RestoreDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - -def test_restore_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc - request = {} - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.restore_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.restore_database] = mock_rpc - - request = {} - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_database_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.RestoreDatabaseRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.RestoreDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_database_async_from_dict(): - await test_restore_database_async(request_type=dict) - -def test_restore_database_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.RestoreDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restore_database_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.RestoreDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_restore_database_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
-        client.restore_database(
-            parent='parent_value',
-            database_id='database_id_value',
-            backup='backup_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].database_id
-        mock_val = 'database_id_value'
-        assert arg == mock_val
-        assert args[0].backup == 'backup_value'
-
-
-def test_restore_database_flattened_error():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.restore_database(
-            spanner_database_admin.RestoreDatabaseRequest(),
-            parent='parent_value',
-            database_id='database_id_value',
-            backup='backup_value',
-        )
-
-@pytest.mark.asyncio
-async def test_restore_database_flattened_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.restore_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.restore_database(
-            parent='parent_value',
-            database_id='database_id_value',
-            backup='backup_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].database_id
-        mock_val = 'database_id_value'
-        assert arg == mock_val
-        assert args[0].backup == 'backup_value'
-
-@pytest.mark.asyncio
-async def test_restore_database_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.restore_database(
-            spanner_database_admin.RestoreDatabaseRequest(),
-            parent='parent_value',
-            database_id='database_id_value',
-            backup='backup_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  spanner_database_admin.ListDatabaseOperationsRequest,
-  dict,
-])
-def test_list_database_operations(request_type, transport: str = 'grpc'):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_database_admin.ListDatabaseOperationsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_database_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.ListDatabaseOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabaseOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_database_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.ListDatabaseOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_database_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabaseOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_database_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc - request = {} - client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_database_operations(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_database_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DatabaseAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_database_operations in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_database_operations] = mock_rpc
-
-        request = {}
-        await client.list_database_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_database_operations(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_database_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseOperationsRequest):
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_database_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_database_admin.ListDatabaseOperationsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDatabaseOperationsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_database_operations_async_from_dict():
-    await test_list_database_operations_async(request_type=dict)
-
-def test_list_database_operations_field_headers():
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_database_admin.ListDatabaseOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_database_operations_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_database_admin.ListDatabaseOperationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse()) - await client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_database_operations_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_database_operations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_database_operations_flattened_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_database_operations( - spanner_database_admin.ListDatabaseOperationsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_database_operations_flattened_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_database_operations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_database_operations_flattened_error_async():
-    client = DatabaseAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_database_operations(
-            spanner_database_admin.ListDatabaseOperationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_database_operations_pager(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[],
-                next_page_token='def',
-            ),
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_database_admin.ListDatabaseOperationsResponse(
-                operations=[
-                    operations_pb2.Operation(),
-                    operations_pb2.Operation(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_database_operations(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, operations_pb2.Operation)
-                   for i in results)
-def test_list_database_operations_pages(transport_name: str = "grpc"):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_database_operations),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_database_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_database_operations_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_database_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_database_operations_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_database_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backup.ListBackupOperationsRequest, - dict, -]) -def test_list_backup_operations(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup.ListBackupOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_backup_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup.ListBackupOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_backup_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup.ListBackupOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_backup_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc - request = {} - client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backup_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backup_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_backup_operations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_backup_operations] = mock_rpc - - request = {} - await client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_backup_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backup_operations_async(transport: str = 'grpc_asyncio', request_type=backup.ListBackupOperationsRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_backup_operations(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backup.ListBackupOperationsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListBackupOperationsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
- @pytest.mark.asyncio
- async def test_list_backup_operations_async_from_dict():
- await test_list_backup_operations_async(request_type=dict)
-
- def test_list_backup_operations_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup.ListBackupOperationsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_operations),
- '__call__') as call:
- call.return_value = backup.ListBackupOperationsResponse()
- client.list_backup_operations(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- @pytest.mark.asyncio
- async def test_list_backup_operations_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup.ListBackupOperationsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_operations),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse())
- await client.list_backup_operations(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- def test_list_backup_operations_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_operations),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup.ListBackupOperationsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_backup_operations(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
- def test_list_backup_operations_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_backup_operations(
- backup.ListBackupOperationsRequest(),
- parent='parent_value',
- )
-
- @pytest.mark.asyncio
- async def test_list_backup_operations_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_operations),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_backup_operations(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
- @pytest.mark.asyncio
- async def test_list_backup_operations_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_backup_operations(
- backup.ListBackupOperationsRequest(),
- parent='parent_value',
- )
-
-
- def test_list_backup_operations_pager(transport_name: str = "grpc"):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_operations),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backup_operations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) -def test_list_backup_operations_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_operations_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_operations_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabaseRolesRequest, - dict, -]) -def test_list_database_roles(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.ListDatabaseRolesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.ListDatabaseRolesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabaseRolesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_database_roles_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.ListDatabaseRolesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_database_roles(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.ListDatabaseRolesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_database_roles_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_roles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc - request = {} - client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_database_roles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_database_roles_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_database_roles in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_database_roles] = mock_rpc - - request = {} - await client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_database_roles(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_list_database_roles_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.ListDatabaseRolesRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_database_roles),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_database_roles(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner_database_admin.ListDatabaseRolesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDatabaseRolesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
- @pytest.mark.asyncio
- async def test_list_database_roles_async_from_dict():
- await test_list_database_roles_async(request_type=dict)
-
- def test_list_database_roles_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner_database_admin.ListDatabaseRolesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_database_roles),
- '__call__') as call:
- call.return_value = spanner_database_admin.ListDatabaseRolesResponse()
- client.list_database_roles(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- @pytest.mark.asyncio
- async def test_list_database_roles_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner_database_admin.ListDatabaseRolesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_database_roles),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse())
- await client.list_database_roles(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- def test_list_database_roles_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_database_roles),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner_database_admin.ListDatabaseRolesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_database_roles(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
- def test_list_database_roles_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_database_roles(
- spanner_database_admin.ListDatabaseRolesRequest(),
- parent='parent_value',
- )
-
- @pytest.mark.asyncio
- async def test_list_database_roles_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_database_roles),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_database_roles(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
- @pytest.mark.asyncio
- async def test_list_database_roles_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_database_roles(
- spanner_database_admin.ListDatabaseRolesRequest(),
- parent='parent_value',
- )
-
-
- def test_list_database_roles_pager(transport_name: str = "grpc"):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_database_roles),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_database_roles(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_database_admin.DatabaseRole) - for i in results) -def test_list_database_roles_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - pages = list(client.list_database_roles(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_database_roles_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_database_roles(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_database_admin.DatabaseRole) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_database_roles_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_database_roles(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.AddSplitPointsRequest, - dict, -]) -def test_add_split_points(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.AddSplitPointsResponse( - ) - response = client.add_split_points(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.AddSplitPointsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.AddSplitPointsResponse) - - -def test_add_split_points_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.AddSplitPointsRequest( - database='database_value', - initiator='initiator_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.add_split_points(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.AddSplitPointsRequest( - database='database_value', - initiator='initiator_value', - ) - -def test_add_split_points_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.add_split_points in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc - request = {} - client.add_split_points(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.add_split_points(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_add_split_points_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.add_split_points in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.add_split_points] = mock_rpc
-
- request = {}
- await client.add_split_points(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.add_split_points(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_add_split_points_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.AddSplitPointsRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.add_split_points),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse(
- ))
- response = await client.add_split_points(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner_database_admin.AddSplitPointsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, spanner_database_admin.AddSplitPointsResponse)
-
-
- @pytest.mark.asyncio
- async def test_add_split_points_async_from_dict():
- await test_add_split_points_async(request_type=dict)
-
- def test_add_split_points_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner_database_admin.AddSplitPointsRequest()
-
- request.database = 'database_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.add_split_points),
- '__call__') as call:
- call.return_value = spanner_database_admin.AddSplitPointsResponse()
- client.add_split_points(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'database=database_value',
- ) in kw['metadata']
-
-
- @pytest.mark.asyncio
- async def test_add_split_points_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner_database_admin.AddSplitPointsRequest()
-
- request.database = 'database_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.add_split_points),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse())
- await client.add_split_points(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'database=database_value',
- ) in kw['metadata']
-
-
- def test_add_split_points_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.add_split_points),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner_database_admin.AddSplitPointsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.add_split_points(
- database='database_value',
- split_points=[spanner_database_admin.SplitPoints(table='table_value')],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].database
- mock_val = 'database_value'
- assert arg == mock_val
- arg = args[0].split_points
- mock_val = [spanner_database_admin.SplitPoints(table='table_value')]
- assert arg == mock_val
-
-
- def test_add_split_points_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.add_split_points(
- spanner_database_admin.AddSplitPointsRequest(),
- database='database_value',
- split_points=[spanner_database_admin.SplitPoints(table='table_value')],
- )
-
- @pytest.mark.asyncio
- async def test_add_split_points_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.add_split_points),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.add_split_points( - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].split_points - mock_val = [spanner_database_admin.SplitPoints(table='table_value')] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_add_split_points_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.add_split_points( - spanner_database_admin.AddSplitPointsRequest(), - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.CreateBackupScheduleRequest, - dict, -]) -def test_create_backup_schedule(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - response = client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gsad_backup_schedule.CreateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gsad_backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_create_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup_schedule.CreateBackupScheduleRequest( - parent='parent_value', - backup_schedule_id='backup_schedule_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup_schedule.CreateBackupScheduleRequest( - parent='parent_value', - backup_schedule_id='backup_schedule_id_value', - ) - -def test_create_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc - request = {} - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_backup_schedule in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_backup_schedule] = mock_rpc - - request = {} - await client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.CreateBackupScheduleRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule(
- name='name_value',
- ))
- response = await client.create_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = gsad_backup_schedule.CreateBackupScheduleRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup_schedule.BackupSchedule)
- assert response.name == 'name_value'
-
-
- @pytest.mark.asyncio
- async def test_create_backup_schedule_async_from_dict():
- await test_create_backup_schedule_async(request_type=dict)
-
- def test_create_backup_schedule_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.CreateBackupScheduleRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- call.return_value = gsad_backup_schedule.BackupSchedule()
- client.create_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- @pytest.mark.asyncio
- async def test_create_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.CreateBackupScheduleRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- await client.create_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- def test_create_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gsad_backup_schedule.BackupSchedule()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_backup_schedule(
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].backup_schedule_id
- mock_val = 'backup_schedule_id_value'
- assert arg == mock_val
-
-
- def test_create_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_backup_schedule(
- gsad_backup_schedule.CreateBackupScheduleRequest(),
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
- @pytest.mark.asyncio
- async def test_create_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_backup_schedule(
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].backup_schedule_id
- mock_val = 'backup_schedule_id_value'
- assert arg == mock_val
-
- @pytest.mark.asyncio
- async def test_create_backup_schedule_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_backup_schedule(
- gsad_backup_schedule.CreateBackupScheduleRequest(),
- parent='parent_value',
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- backup_schedule_id='backup_schedule_id_value',
- )
-
-
- @pytest.mark.parametrize("request_type", [
- backup_schedule.GetBackupScheduleRequest,
- dict,
- ])
- def test_get_backup_schedule(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup_schedule.BackupSchedule( - name='name_value', - ) - response = client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup_schedule.GetBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_get_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup_schedule.GetBackupScheduleRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup_schedule.GetBackupScheduleRequest( - name='name_value', - ) - -def test_get_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc - request = {} - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_backup_schedule(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_backup_schedule in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_backup_schedule] = mock_rpc
-
- request = {}
- await client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_backup_schedule(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.GetBackupScheduleRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule(
- name='name_value',
- ))
- response = await client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backup_schedule.GetBackupScheduleRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, backup_schedule.BackupSchedule)
- assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_async_from_dict():
- await test_get_backup_schedule_async(request_type=dict)
-
-def test_get_backup_schedule_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.GetBackupScheduleRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- call.return_value = backup_schedule.BackupSchedule()
- client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.GetBackupScheduleRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule())
- await client.get_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup_schedule.BackupSchedule()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_backup_schedule(
- backup_schedule.GetBackupScheduleRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_backup_schedule_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup_schedule( - backup_schedule.GetBackupScheduleRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.UpdateBackupScheduleRequest, - dict, -]) -def test_update_backup_schedule(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - response = client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gsad_backup_schedule.UpdateBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gsad_backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -def test_update_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gsad_backup_schedule.UpdateBackupScheduleRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gsad_backup_schedule.UpdateBackupScheduleRequest( - ) - -def test_update_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc - request = {} - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_backup_schedule in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_backup_schedule] = mock_rpc - - request = {} - await client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. 
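- # FakeUnaryUnaryCall wraps the message in an awaitable, so the mocked
- # async stub can be awaited like a real unary-unary gRPC call.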
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule(
- name='name_value',
- ))
- response = await client.update_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = gsad_backup_schedule.UpdateBackupScheduleRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gsad_backup_schedule.BackupSchedule)
- assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_async_from_dict():
- await test_update_backup_schedule_async(request_type=dict)
-
-def test_update_backup_schedule_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.UpdateBackupScheduleRequest()
-
- request.backup_schedule.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- call.return_value = gsad_backup_schedule.BackupSchedule()
- client.update_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'backup_schedule.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = gsad_backup_schedule.UpdateBackupScheduleRequest()
-
- request.backup_schedule.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- await client.update_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'backup_schedule.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gsad_backup_schedule.BackupSchedule()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_backup_schedule(
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_backup_schedule(
- gsad_backup_schedule.UpdateBackupScheduleRequest(),
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_backup_schedule(
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].backup_schedule
- mock_val = gsad_backup_schedule.BackupSchedule(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_backup_schedule_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_backup_schedule(
- gsad_backup_schedule.UpdateBackupScheduleRequest(),
- backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backup_schedule.DeleteBackupScheduleRequest,
- dict,
-])
-def test_delete_backup_schedule(request_type, transport: str = 'grpc'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- response = client.delete_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup_schedule.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_backup_schedule_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup_schedule.DeleteBackupScheduleRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_backup_schedule(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup_schedule.DeleteBackupScheduleRequest( - name='name_value', - ) - -def test_delete_backup_schedule_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc - request = {} - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup_schedule in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup_schedule] = mock_rpc - - request = {} - await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.DeleteBackupScheduleRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backup_schedule.DeleteBackupScheduleRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_backup_schedule_async_from_dict(): - await test_delete_backup_schedule_async(request_type=dict) - -def test_delete_backup_schedule_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backup_schedule.DeleteBackupScheduleRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value = None - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_backup_schedule_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.DeleteBackupScheduleRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_backup_schedule(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_backup_schedule_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_backup_schedule_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_backup_schedule(
- backup_schedule.DeleteBackupScheduleRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_backup_schedule_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_schedule),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_backup_schedule(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_backup_schedule_flattened_error_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.delete_backup_schedule( - backup_schedule.DeleteBackupScheduleRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backup_schedule.ListBackupSchedulesRequest, - dict, -]) -def test_list_backup_schedules(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backup_schedule.ListBackupSchedulesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backup_schedule.ListBackupSchedulesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupSchedulesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_backup_schedules_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backup_schedule.ListBackupSchedulesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_backup_schedules(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backup_schedule.ListBackupSchedulesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_backup_schedules_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_schedules in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc - request = {} - client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_backup_schedules(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_backup_schedules in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_backup_schedules] = mock_rpc
-
- request = {}
- await client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_backup_schedules(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_async(transport: str = 'grpc_asyncio', request_type=backup_schedule.ListBackupSchedulesRequest):
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backup_schedule.ListBackupSchedulesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListBackupSchedulesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_async_from_dict():
- await test_list_backup_schedules_async(request_type=dict)
-
-def test_list_backup_schedules_field_headers():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.ListBackupSchedulesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- call.return_value = backup_schedule.ListBackupSchedulesResponse()
- client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_field_headers_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backup_schedule.ListBackupSchedulesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
- await client.list_backup_schedules(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_backup_schedules_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backup_schedule.ListBackupSchedulesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_backup_schedules(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_backup_schedules_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_backup_schedules(
- backup_schedule.ListBackupSchedulesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_backup_schedules_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_schedules),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_backup_schedules( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backup_schedules_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backup_schedules( - backup_schedule.ListBackupSchedulesRequest(), - parent='parent_value', - ) - - -def test_list_backup_schedules_pager(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backup_schedules(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup_schedule.BackupSchedule) - for i in results) -def test_list_backup_schedules_pages(transport_name: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Set the response to a series of pages. 
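- # Each element of side_effect is returned by one successive stub call;
- # the trailing RuntimeError fails the test if the pager fetches more
- # pages than the test provides.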
- call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_schedules(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_schedules_async_pager(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_schedules(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backup_schedule.BackupSchedule) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_schedules_async_pages(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_schedules(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.InternalUpdateGraphOperationRequest, - dict, -]) -def test_internal_update_graph_operation(request_type, transport: str = 'grpc'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse( - ) - response = client.internal_update_graph_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_database_admin.InternalUpdateGraphOperationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse) - - -def test_internal_update_graph_operation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_database_admin.InternalUpdateGraphOperationRequest( - database='database_value', - operation_id='operation_id_value', - vm_identity_token='vm_identity_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.internal_update_graph_operation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_database_admin.InternalUpdateGraphOperationRequest( - database='database_value', - operation_id='operation_id_value', - vm_identity_token='vm_identity_token_value', - ) - -def test_internal_update_graph_operation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.internal_update_graph_operation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.internal_update_graph_operation] = mock_rpc - request = {} - client.internal_update_graph_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.internal_update_graph_operation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_internal_update_graph_operation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.internal_update_graph_operation in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.internal_update_graph_operation] = mock_rpc - - request = {} - await client.internal_update_graph_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.internal_update_graph_operation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_internal_update_graph_operation_async(transport: str = 'grpc_asyncio', request_type=spanner_database_admin.InternalUpdateGraphOperationRequest): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.internal_update_graph_operation),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse(
- ))
- response = await client.internal_update_graph_operation(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner_database_admin.InternalUpdateGraphOperationRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, spanner_database_admin.InternalUpdateGraphOperationResponse)
-
-
-@pytest.mark.asyncio
-async def test_internal_update_graph_operation_async_from_dict():
- await test_internal_update_graph_operation_async(request_type=dict)
-
-
-def test_internal_update_graph_operation_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.internal_update_graph_operation),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.internal_update_graph_operation(
- database='database_value',
- operation_id='operation_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].database
- mock_val = 'database_value'
- assert arg == mock_val
- arg = args[0].operation_id
- mock_val = 'operation_id_value'
- assert arg == mock_val
-
-
-def test_internal_update_graph_operation_flattened_error():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.internal_update_graph_operation(
- spanner_database_admin.InternalUpdateGraphOperationRequest(),
- database='database_value',
- operation_id='operation_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_internal_update_graph_operation_flattened_async():
- client = DatabaseAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.internal_update_graph_operation),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.internal_update_graph_operation(
- database='database_value',
- operation_id='operation_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = 'database_value' - assert arg == mock_val - arg = args[0].operation_id - mock_val = 'operation_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_internal_update_graph_operation_flattened_error_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.internal_update_graph_operation( - spanner_database_admin.InternalUpdateGraphOperationRequest(), - database='database_value', - operation_id='operation_id_value', - ) - - -def test_list_databases_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc - - request = {} - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_databases_rest_required_fields(request_type=spanner_database_admin.ListDatabasesRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse() - # Mock the http request call within the method and fake a response. 
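- # Patching requests.Session.request intercepts the REST transport at the
- # HTTP layer, so the test exercises serialization without network traffic.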
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_databases(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_databases_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.list_databases._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_databases_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = spanner_database_admin.ListDatabasesResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_databases(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1])
-
-
-def test_list_databases_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
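- # A request object is taken as authoritative, so combining it with
- # flattened keyword arguments would be ambiguous; the client raises
- # rather than guess which argument style should win.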
- with pytest.raises(ValueError):
- client.list_databases(
- spanner_database_admin.ListDatabasesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_databases_rest_pager(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_database_admin.ListDatabasesResponse(
- databases=[
- spanner_database_admin.Database(),
- spanner_database_admin.Database(),
- spanner_database_admin.Database(),
- ],
- next_page_token='abc',
- ),
- spanner_database_admin.ListDatabasesResponse(
- databases=[],
- next_page_token='def',
- ),
- spanner_database_admin.ListDatabasesResponse(
- databases=[
- spanner_database_admin.Database(),
- ],
- next_page_token='ghi',
- ),
- spanner_database_admin.ListDatabasesResponse(
- databases=[
- spanner_database_admin.Database(),
- spanner_database_admin.Database(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_database_admin.ListDatabasesResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
- pager = client.list_databases(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, spanner_database_admin.Database)
- for i in results)
-
- pages = list(client.list_databases(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_create_database_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.create_database in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.create_database] = mock_rpc
-
- request = {}
- client.create_database(request)
-
- # Establish that the underlying gRPC stub method was called.
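- # _wrapped_methods is keyed by the bound transport method, so swapping in
- # a mock lets the test count invocations of the cached wrapper directly.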
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_database_rest_required_fields(request_type=spanner_database_admin.CreateDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["create_statement"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["createStatement"] = 'create_statement_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "createStatement" in jsonified_request - assert jsonified_request["createStatement"] == 'create_statement_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
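- # transcode() would normally match the request against the method's
- # http_options; pinning its result to a bare uri/method/body keeps this
- # check independent of the real URL template.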
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.create_database(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_create_database_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.create_database._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("parent", "createStatement", )))
-
-
-def test_create_database_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- create_statement='create_statement_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.create_database(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases" % client.transport._host, args[1])
-
-
-def test_create_database_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.create_database( - spanner_database_admin.CreateDatabaseRequest(), - parent='parent_value', - create_statement='create_statement_value', - ) - - -def test_get_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc - - request = {} - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_database_rest_required_fields(request_type=spanner_database_admin.GetDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = spanner_database_admin.Database.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_database(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_database_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_database._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_database_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = spanner_database_admin.Database()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = spanner_database_admin.Database.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.get_database(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*}" % client.transport._host, args[1])
-
-
-def test_get_database_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.get_database( - spanner_database_admin.GetDatabaseRequest(), - name='name_value', - ) - - -def test_update_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_database_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
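- # For UpdateDatabase the Database message forms the PATCH body while
- # update_mask rides in the query string, which is why it is the one
- # field excluded from the unset-required-fields check above.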
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "patch",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.update_database(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_database_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.update_database._get_unset_required_fields({})
- assert set(unset_fields) == (set(("updateMask", )) & set(("database", "updateMask", )))
-
-
-def test_update_database_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}}
-
- # get truthy value for each flattened field
- mock_args = dict(
- database=spanner_database_admin.Database(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.update_database(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{database.name=projects/*/instances/*/databases/*}" % client.transport._host, args[1])
-
-
-def test_update_database_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.update_database( - spanner_database_admin.UpdateDatabaseRequest(), - database=spanner_database_admin.Database(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_database_ddl_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database_ddl in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database_ddl] = mock_rpc - - request = {} - client.update_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_database_ddl_rest_required_fields(request_type=spanner_database_admin.UpdateDatabaseDdlRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request_init["statements"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - jsonified_request["statements"] = 'statements_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - assert "statements" in jsonified_request - assert jsonified_request["statements"] == 'statements_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "patch",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.update_database_ddl(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_database_ddl_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.update_database_ddl._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("database", "statements", )))
-
-
-def test_update_database_ddl_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- database='database_value',
- statements=['statements_value'],
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.update_database_ddl(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1])
-
-
-def test_update_database_ddl_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.update_database_ddl( - spanner_database_admin.UpdateDatabaseDdlRequest(), - database='database_value', - statements=['statements_value'], - ) - - -def test_drop_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.drop_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.drop_database] = mock_rpc - - request = {} - client.drop_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.drop_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_drop_database_rest_required_fields(request_type=spanner_database_admin.DropDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).drop_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
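- # DropDatabase maps to HTTP DELETE and returns Empty, so the faked
- # response body below is an empty string rather than a serialized message.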
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ''
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.drop_database(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_drop_database_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.drop_database._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("database", )))
-
-
-def test_drop_database_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = None
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- database='database_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ''
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.drop_database(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}" % client.transport._host, args[1])
-
-
-def test_drop_database_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.drop_database(
- spanner_database_admin.DropDatabaseRequest(),
- database='database_value',
- )
-
-
-def test_get_database_ddl_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_database_ddl in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_database_ddl] = mock_rpc - - request = {} - client.get_database_ddl(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_database_ddl(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_database_ddl_rest_required_fields(request_type=spanner_database_admin.GetDatabaseDdlRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database_ddl._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_database_ddl(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_database_ddl_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_database_ddl._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("database", )))
-
-
-def test_get_database_ddl_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = spanner_database_admin.GetDatabaseDdlResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- database='database_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.get_database_ddl(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}/ddl" % client.transport._host, args[1])
-
-
-def test_get_database_ddl_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.get_database_ddl( - spanner_database_admin.GetDatabaseDdlRequest(), - database='database_value', - ) - - -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
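- # iam_policy_pb2 messages are plain protobuf rather than proto-plus, so
- # the request below is used directly with no request_type.pb() conversion.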
- pb_request = request
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.set_iam_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_set_iam_policy_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.set_iam_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("resource", "policy", )))
-
-
-def test_set_iam_policy_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = policy_pb2.Policy()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- resource='resource_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.set_iam_policy(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:setIamPolicy" % client.transport._host, args[1])
-
-
-def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_iam_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_iam_policy_rest_unset_required_fields():
- transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_iam_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("resource", )))
-
-
-def test_get_iam_policy_rest_flattened():
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = policy_pb2.Policy()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- resource='resource_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.get_iam_policy(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:getIamPolicy" % client.transport._host, args[1])
-
-
-def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - jsonified_request["permissions"] = 'permissions_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == 'permissions_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_test_iam_permissions_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) - - -def test_test_iam_permissions_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - permissions=['permissions_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.test_iam_permissions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*/databases/*}:testIamPermissions" % client.transport._host, args[1]) - - -def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
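[Editor's note] Every REST test here shares one mocking recipe: patch requests.Session.request, hand back a canned Response, then assert on the captured call. A minimal sketch assuming only requests and the standard library; the URL is arbitrary:

    from unittest import mock
    from requests import Response, Session

    canned = Response()
    canned.status_code = 200
    canned._content = b"{}"  # body the client would parse as an empty message

    with mock.patch.object(Session, "request", return_value=canned) as req:
        assert Session().request("POST", "https://example.invalid/v1/x") is canned
        assert req.call_count == 1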
- with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_create_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup] = mock_rpc - - request = {} - client.create_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_rest_required_fields(request_type=gsad_backup.CreateBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "backupId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == request_init["backup_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupId"] = 'backup_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("backup_id", "encryption_config", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == 'backup_id_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
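[Editor's note] The create_backup required-fields test first asserts that "backupId" is absent from the serialized request. That follows from proto3 JSON serialization dropping default-valued fields, demonstrated here with a well-known message:

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    op = operations_pb2.Operation()               # name is still the default ""
    assert json_format.MessageToJson(op) == "{}"  # default fields are omitted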
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup(request) - - expected_params = [ - ( - "backupId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("backupId", "encryptionConfig", )) & set(("parent", "backupId", "backup", ))) - - -def test_create_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1]) - - -def test_create_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
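[Editor's note] The *_flattened tests assert the final URL against the method's http rule via path_template.validate. A standalone example of that check; host and resource IDs are invented:

    from google.api_core import path_template

    assert path_template.validate(
        "https://example.invalid/v1/{parent=projects/*/instances/*}/backups",
        "https://example.invalid/v1/projects/p1/instances/i1/backups",
    )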
- with pytest.raises(ValueError): - client.create_backup( - gsad_backup.CreateBackupRequest(), - parent='parent_value', - backup=gsad_backup.Backup(database='database_value'), - backup_id='backup_id_value', - ) - - -def test_copy_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.copy_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.copy_backup] = mock_rpc - - request = {} - client.copy_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.copy_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_copy_backup_rest_required_fields(request_type=backup.CopyBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_id"] = "" - request_init["source_backup"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupId"] = 'backup_id_value' - jsonified_request["sourceBackup"] = 'source_backup_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).copy_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupId" in jsonified_request - assert jsonified_request["backupId"] == 'backup_id_value' - assert "sourceBackup" in jsonified_request - assert jsonified_request["sourceBackup"] == 'source_backup_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.copy_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_copy_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.copy_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "backupId", "sourceBackup", "expireTime", ))) - - -def test_copy_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - expire_time=timestamp_pb2.Timestamp(seconds=751), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.copy_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups:copy" % client.transport._host, args[1]) - - -def test_copy_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
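[Editor's note] copy_backup's flattened expire_time argument is a protobuf Timestamp; the seconds=751 used above is an arbitrary test value. Two equivalent constructions (naive datetimes are treated as UTC):

    import datetime
    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp(seconds=751)
    ts2 = timestamp_pb2.Timestamp()
    ts2.FromDatetime(datetime.datetime(1970, 1, 1, 0, 12, 31))  # 751s after epoch
    assert ts == ts2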
- with pytest.raises(ValueError): - client.copy_backup( - backup.CopyBackupRequest(), - parent='parent_value', - backup_id='backup_id_value', - source_backup='source_backup_value', - expire_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -def test_get_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_rest_required_fields(request_type=backup.GetBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.Backup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) - - -def test_get_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
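[Editor's note] The get_backup mock response is built by converting the proto-plus Backup to its raw protobuf form and serializing to JSON, which is exactly what the client parses back. A sketch of that round trip; the resource name is invented:

    from google.protobuf import json_format
    from google.cloud.spanner_admin_database_v1.types import backup

    msg = backup.Backup(name="projects/p1/instances/i1/backups/b1")
    body = json_format.MessageToJson(backup.Backup.pb(msg))  # fake HTTP body
    assert backup.Backup.from_json(body).name == msg.name    # client-side parse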
- with pytest.raises(ValueError): - client.get_backup( - backup.GetBackupRequest(), - name='name_value', - ) - - -def test_update_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc - - request = {} - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_backup_rest_required_fields(request_type=gsad_backup.UpdateBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("backup", "updateMask", ))) - - -def test_update_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup() - - # get arguments that satisfy an http rule for this method - sample_request = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{backup.name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) - - -def test_update_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
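[Editor's note] update_backup pairs the new Backup message with a FieldMask naming the fields the server may touch; the 'paths_value' above is only a truthy placeholder. A more realistic-looking mask (the path name is illustrative):

    from google.protobuf import field_mask_pb2

    mask = field_mask_pb2.FieldMask(paths=["expire_time"])
    assert list(mask.paths) == ["expire_time"]
    # update_backup(backup=..., update_mask=mask) would then change only
    # the backup's expire_time on the server.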
- with pytest.raises(ValueError): - client.update_backup( - gsad_backup.UpdateBackupRequest(), - backup=gsad_backup.Backup(database='database_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_rest_required_fields(request_type=backup.DeleteBackupRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_backup_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/backups/*}" % client.transport._host, args[1]) - - -def test_delete_backup_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - backup.DeleteBackupRequest(), - name='name_value', - ) - - -def test_list_backups_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
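[Editor's note] The *_use_cached_wrapped_rpc tests all verify one property: wrapped methods are built once at client construction and reused on every call. A condensed, library-free sketch of that caching behavior:

    from unittest import mock

    cache = {}

    def wrapped(fn):
        # mirrors _prep_wrapped_messages: build once, then always reuse
        if fn not in cache:
            cache[fn] = mock.Mock(wraps=fn)
        return cache[fn]

    def rpc():
        return "response"

    assert wrapped(rpc) is wrapped(rpc)  # second lookup hits the cache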
- client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc - - request = {} - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backups_rest_required_fields(request_type=backup.ListBackupsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backups(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backups_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backups._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_backups_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backups(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backups" % client.transport._host, args[1]) - - -def test_list_backups_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backups( - backup.ListBackupsRequest(), - parent='parent_value', - ) - - -def test_list_backups_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - backup.Backup(), - ], - next_page_token='abc', - ), - backup.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - ], - next_page_token='ghi', - ), - backup.ListBackupsResponse( - backups=[ - backup.Backup(), - backup.Backup(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backup.ListBackupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - pager = client.list_backups(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup.Backup) - for i in results) - - pages = list(client.list_backups(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_restore_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_database] = mock_rpc - - request = {} - client.restore_database(request) - - # Establish that the underlying gRPC stub method was called. 
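[Editor's note] The pager test above drives four fake pages through the client and expects six flattened items. The underlying pagination contract, reduced to plain data (tokens copied from the test):

    pages = [
        (["b1", "b2", "b3"], "abc"),
        ([], "def"),
        (["b4"], "ghi"),
        (["b5", "b6"], ""),
    ]

    def items(pages):
        for backups, token in pages:
            yield from backups
            if not token:  # empty next_page_token ends iteration
                break

    assert list(items(pages)) == ["b1", "b2", "b3", "b4", "b5", "b6"]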
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_restore_database_rest_required_fields(request_type=spanner_database_admin.RestoreDatabaseRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["databaseId"] = 'database_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == 'database_id_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.restore_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_restore_database_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.restore_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "databaseId", ))) - - -def test_restore_database_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - database_id='database_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.restore_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databases:restore" % client.transport._host, args[1]) - - -def test_restore_database_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
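[Editor's note] restore_database (like create_backup and copy_backup) is long-running, so the faked body is a longrunning Operation rather than the final Database; 'operations/spam' is the test's placeholder name:

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    op = operations_pb2.Operation(name="operations/spam")
    assert '"operations/spam"' in json_format.MessageToJson(op)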
- with pytest.raises(ValueError): - client.restore_database( - spanner_database_admin.RestoreDatabaseRequest(), - parent='parent_value', - database_id='database_id_value', - backup='backup_value', - ) - - -def test_list_database_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_operations] = mock_rpc - - request = {} - client.list_database_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_database_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_database_operations_rest_required_fields(request_type=spanner_database_admin.ListDatabaseOperationsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_database_operations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_database_operations_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_database_operations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_database_operations_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_database_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/databaseOperations" % client.transport._host, args[1]) - - -def test_list_database_operations_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_database_operations( - spanner_database_admin.ListDatabaseOperationsRequest(), - parent='parent_value', - ) - - -def test_list_database_operations_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(spanner_database_admin.ListDatabaseOperationsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - pager = client.list_database_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) - - pages = list(client.list_database_operations(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_backup_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_operations] = mock_rpc - - request = {} - client.list_backup_operations(request) - - # Establish that the underlying gRPC stub method was called. 
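[Editor's note] The multi-page tests feed Session.request a sequence via side_effect, so each call consumes the next canned Response in order. A minimal sketch, assuming only requests; bodies and URLs are invented:

    from unittest import mock
    from requests import Response, Session

    def canned(body: bytes) -> Response:
        r = Response()
        r.status_code = 200
        r._content = body
        return r

    replies = [canned(b'{"nextPageToken": "abc"}'), canned(b"{}")]
    with mock.patch.object(Session, "request", side_effect=replies):
        s = Session()
        assert s.request("GET", "u1") is replies[0]  # first page
        assert s.request("GET", "u2") is replies[1]  # second (last) page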
- assert mock_rpc.call_count == 1 - - client.list_backup_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backup_operations_rest_required_fields(request_type=backup.ListBackupOperationsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
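- # transcode() normally matches the request against the method's - # google.api.http rule and splits it into URI, verb, query params and - # body; the fixed dict below (keys 'uri', 'method', 'query_params', - # plus 'body' for verbs that carry a payload) stands in for that result - # so required-field handling can be tested without a real rule match.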
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backup_operations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backup_operations_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backup_operations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_backup_operations_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backup_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/backupOperations" % client.transport._host, args[1]) - - -def test_list_backup_operations_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_operations( - backup.ListBackupOperationsRequest(), - parent='parent_value', - ) - - -def test_list_backup_operations_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages - response = ( - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - backup.ListBackupOperationsResponse( - operations=[], - next_page_token='def', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - backup.ListBackupOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backup.ListBackupOperationsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - pager = client.list_backup_operations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in results) - - pages = list(client.list_backup_operations(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_database_roles_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_database_roles in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_database_roles] = mock_rpc - - request = {} - client.list_database_roles(request) - - # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.list_database_roles(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_database_roles_rest_required_fields(request_type=spanner_database_admin.ListDatabaseRolesRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_database_roles._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_database_roles(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_database_roles_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_database_roles._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_database_roles_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_database_roles(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/databaseRoles" % client.transport._host, args[1]) - - -def test_list_database_roles_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_database_roles( - spanner_database_admin.ListDatabaseRolesRequest(), - parent='parent_value', - ) - - -def test_list_database_roles_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages - response = ( - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - next_page_token='abc', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[], - next_page_token='def', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - ], - next_page_token='ghi', - ), - spanner_database_admin.ListDatabaseRolesResponse( - database_roles=[ - spanner_database_admin.DatabaseRole(), - spanner_database_admin.DatabaseRole(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(spanner_database_admin.ListDatabaseRolesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - pager = client.list_database_roles(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_database_admin.DatabaseRole) - for i in results) - - pages = list(client.list_database_roles(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_add_split_points_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.add_split_points in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.add_split_points] = mock_rpc - - request = {} - client.add_split_points(request) - - # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.add_split_points(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_add_split_points_rest_required_fields(request_type=spanner_database_admin.AddSplitPointsRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["database"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["database"] = 'database_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).add_split_points._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "database" in jsonified_request - assert jsonified_request["database"] == 'database_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.AddSplitPointsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.add_split_points(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_add_split_points_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.add_split_points._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("database", "splitPoints", ))) - - -def test_add_split_points_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
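- # The flattened kwargs below (database=..., split_points=[...]) are - # merged into a single AddSplitPointsRequest by the client; the final - # assertion then validates the resulting URI against the http rule - # template via path_template.validate.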
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.AddSplitPointsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.add_split_points(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database=projects/*/instances/*/databases/*}:addSplitPoints" % client.transport._host, args[1]) - - -def test_add_split_points_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.add_split_points( - spanner_database_admin.AddSplitPointsRequest(), - database='database_value', - split_points=[spanner_database_admin.SplitPoints(table='table_value')], - ) - - -def test_create_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_schedule] = mock_rpc - - request = {} - client.create_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.CreateBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_schedule_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "backupScheduleId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "backupScheduleId" in jsonified_request - assert jsonified_request["backupScheduleId"] == request_init["backup_schedule_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupScheduleId"] = 'backup_schedule_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_schedule._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("backup_schedule_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupScheduleId" in jsonified_request - assert jsonified_request["backupScheduleId"] == 'backup_schedule_id_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup_schedule(request) - - expected_params = [ - ( - "backupScheduleId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_schedule_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("backupScheduleId", )) & set(("parent", "backupScheduleId", "backupSchedule", ))) - - -def test_create_backup_schedule_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - backup_schedule_id='backup_schedule_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1]) - - -def test_create_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_backup_schedule( - gsad_backup_schedule.CreateBackupScheduleRequest(), - parent='parent_value', - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - backup_schedule_id='backup_schedule_id_value', - ) - - -def test_get_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_schedule] = mock_rpc - - request = {} - client.get_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_schedule_rest_required_fields(request_type=backup_schedule.GetBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup_schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_schedule_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_backup_schedule_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup_schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_get_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_backup_schedule( - backup_schedule.GetBackupScheduleRequest(), - name='name_value', - ) - - -def test_update_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup_schedule] = mock_rpc - - request = {} - client.update_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_backup_schedule_rest_required_fields(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_schedule._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_backup_schedule_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("backupSchedule", "updateMask", ))) - - -def test_update_backup_schedule_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule() - - # get arguments that satisfy an http rule for this method - sample_request = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{backup_schedule.name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_update_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_backup_schedule( - gsad_backup_schedule.UpdateBackupScheduleRequest(), - backup_schedule=gsad_backup_schedule.BackupSchedule(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_backup_schedule_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_schedule in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_schedule] = mock_rpc - - request = {} - client.delete_backup_schedule(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_backup_schedule(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_schedule_rest_required_fields(request_type=backup_schedule.DeleteBackupScheduleRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_schedule._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup_schedule(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_schedule_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup_schedule._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_backup_schedule_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup_schedule(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/databases/*/backupSchedules/*}" % client.transport._host, args[1]) - - -def test_delete_backup_schedule_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup_schedule( - backup_schedule.DeleteBackupScheduleRequest(), - name='name_value', - ) - - -def test_list_backup_schedules_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_schedules in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.list_backup_schedules] = mock_rpc - - request = {} - client.list_backup_schedules(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backup_schedules(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backup_schedules_rest_required_fields(request_type=backup_schedule.ListBackupSchedulesRequest): - transport_class = transports.DatabaseAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_schedules._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backup_schedule.ListBackupSchedulesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backup_schedules(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backup_schedules_rest_unset_required_fields(): - transport = transports.DatabaseAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backup_schedules._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_backup_schedules_rest_flattened(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup_schedule.ListBackupSchedulesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backup_schedules(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*/databases/*}/backupSchedules" % client.transport._host, args[1]) - - -def test_list_backup_schedules_rest_flattened_error(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_schedules( - backup_schedule.ListBackupSchedulesRequest(), - parent='parent_value', - ) - - -def test_list_backup_schedules_rest_pager(transport: str = 'rest'): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages - response = ( - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - next_page_token='abc', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[], - next_page_token='def', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - ], - next_page_token='ghi', - ), - backup_schedule.ListBackupSchedulesResponse( - backup_schedules=[ - backup_schedule.BackupSchedule(), - backup_schedule.BackupSchedule(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backup_schedule.ListBackupSchedulesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - - pager = client.list_backup_schedules(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backup_schedule.BackupSchedule) - for i in results) - - pages = list(client.list_backup_schedules(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_internal_update_graph_operation_rest_no_http_options(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = spanner_database_admin.InternalUpdateGraphOperationRequest() - with pytest.raises(RuntimeError): - client.internal_update_graph_operation(request) - - -def test_internal_update_graph_operation_rest_error(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(NotImplementedError) as not_implemented_error: - client.internal_update_graph_operation({}) - assert ( - "Method InternalUpdateGraphOperation is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance.
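- # A fully constructed transport instance already carries its own - # credentials and channel, so a client_options api_key (like the - # credentials, credentials_file, and scopes cases above) cannot be - # combined with it and is rejected with ValueError.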
- transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DatabaseAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DatabaseAdminClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DatabaseAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DatabaseAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - transports.DatabaseAdminRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DatabaseAdminClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabasesResponse() - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = spanner_database_admin.Database() - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_ddl_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_drop_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - call.return_value = None - client.drop_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.DropDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_database_ddl_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - call.return_value = spanner_database_admin.GetDatabaseDdlResponse() - client.get_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.CreateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_copy_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.copy_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.CopyBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backup.Backup() - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = gsad_backup.Backup() - client.update_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = None - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = backup.ListBackupsResponse() - client.list_backups(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_database_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_operations_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - client.list_database_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_operations_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - call.return_value = backup.ListBackupOperationsResponse() - client.list_backup_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_roles_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - call.return_value = spanner_database_admin.ListDatabaseRolesResponse() - client.list_database_roles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseRolesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_add_split_points_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - call.return_value = spanner_database_admin.AddSplitPointsResponse() - client.add_split_points(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.AddSplitPointsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - call.return_value = gsad_backup_schedule.BackupSchedule() - client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - call.return_value = backup_schedule.BackupSchedule() - client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - call.return_value = gsad_backup_schedule.BackupSchedule() - client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_schedule_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - call.return_value = None - client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_schedules_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - call.return_value = backup_schedule.ListBackupSchedulesResponse() - client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_internal_update_graph_operation_empty_call_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - call.return_value = spanner_database_admin.InternalUpdateGraphOperationResponse() - client.internal_update_graph_operation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DatabaseAdminAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_databases_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabasesResponse( - next_page_token='next_page_token_value', - )) - await client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - enable_drop_protection=True, - reconciling=True, - )) - await client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_database_ddl_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_database_ddl(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_drop_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.drop_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.DropDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_database_ddl_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - proto_descriptors=b'proto_descriptors_blob', - )) - await client.get_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.CreateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_copy_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.copy_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.CopyBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - )) - await client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - )) - await client.update_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backups_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupsResponse( - next_page_token='next_page_token_value', - )) - await client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restore_database_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_database_operations_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_database_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_operations_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_backup_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_database_roles_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.ListDatabaseRolesResponse( - next_page_token='next_page_token_value', - )) - await client.list_database_roles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseRolesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_add_split_points_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.AddSplitPointsResponse( - )) - await client.add_split_points(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.AddSplitPointsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( - name='name_value', - )) - await client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.BackupSchedule( - name='name_value', - )) - await client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gsad_backup_schedule.BackupSchedule( - name='name_value', - )) - await client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_schedule_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_schedules_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backup_schedule.ListBackupSchedulesResponse( - next_page_token='next_page_token_value', - )) - await client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_internal_update_graph_operation_empty_call_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_database_admin.InternalUpdateGraphOperationResponse( - )) - await client.internal_update_graph_operation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DatabaseAdminClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_databases_rest_bad_request(request_type=spanner_database_admin.ListDatabasesRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabasesRequest, - dict, -]) -def test_list_databases_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabasesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabasesPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_databases_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_databases_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_databases") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_database_admin.ListDatabasesRequest.pb(spanner_database_admin.ListDatabasesRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = spanner_database_admin.ListDatabasesResponse.to_json(spanner_database_admin.ListDatabasesResponse())
- req.return_value.content = return_value
-
- request = spanner_database_admin.ListDatabasesRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = spanner_database_admin.ListDatabasesResponse()
- post_with_metadata.return_value = spanner_database_admin.ListDatabasesResponse(), metadata
-
- client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_create_database_rest_bad_request(request_type=spanner_database_admin.CreateDatabaseRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.create_database(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_database_admin.CreateDatabaseRequest,
- dict,
-])
-def test_create_database_rest_call_success(request_type):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.create_database(request)
-
- # Establish that the response wraps the long-running operation we mocked.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_database_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_database_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_database") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_database_admin.CreateDatabaseRequest.pb(spanner_database_admin.CreateDatabaseRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_database_admin.CreateDatabaseRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_get_database_rest_bad_request(request_type=spanner_database_admin.GetDatabaseRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.GetDatabaseRequest, - dict, -]) -def test_get_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.Database( - name='name_value', - state=spanner_database_admin.Database.State.CREATING, - version_retention_period='version_retention_period_value', - default_leader='default_leader_value', - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - enable_drop_protection=True, - reconciling=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_database(request) - - # Establish that the response is the type that we expect. 
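- # `Database` is a proto-plus message: the JSON body prepared above is
- # parsed back into the protobuf type and wrapped, so the assertions below
- # can compare attributes directly against the canned values.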
- assert isinstance(response, spanner_database_admin.Database) - assert response.name == 'name_value' - assert response.state == spanner_database_admin.Database.State.CREATING - assert response.version_retention_period == 'version_retention_period_value' - assert response.default_leader == 'default_leader_value' - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.enable_drop_protection is True - assert response.reconciling is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.GetDatabaseRequest.pb(spanner_database_admin.GetDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.Database.to_json(spanner_database_admin.Database()) - req.return_value.content = return_value - - request = spanner_database_admin.GetDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.Database() - post_with_metadata.return_value = spanner_database_admin.Database(), metadata - - client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_database_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.UpdateDatabaseRequest, - dict, -]) -def test_update_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/instances/sample2/databases/sample3'}} - request_init["database"] = {'name': 'projects/sample1/instances/sample2/databases/sample3', 'state': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'restore_info': {'source_type': 1, 'backup_info': {'backup': 'backup_value', 'version_time': {}, 'create_time': {}, 'source_database': 'source_database_value'}}, 'encryption_config': {'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'encryption_info': [{'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}], 'version_retention_period': 'version_retention_period_value', 'earliest_version_time': {}, 'default_leader': 'default_leader_value', 'database_dialect': 1, 'enable_drop_protection': True, 'reconciling': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = spanner_database_admin.UpdateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
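- # proto-plus message classes expose their fields via `meta.fields`, while
- # vanilla protobuf classes expose `DESCRIPTOR.fields`; the absence of a
- # `DESCRIPTOR` attribute on `field.message` is what marks it as proto-plus.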
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_database(request) - - # Establish that the response is the type that we expect. 
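- # REST long-running methods return an api_core `operation.Operation`
- # future wrapping the `operations_pb2.Operation` decoded from the body.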
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_database_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_database_admin.UpdateDatabaseRequest.pb(spanner_database_admin.UpdateDatabaseRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_database_admin.UpdateDatabaseRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_update_database_ddl_rest_bad_request(request_type=spanner_database_admin.UpdateDatabaseDdlRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.update_database_ddl(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_database_admin.UpdateDatabaseDdlRequest,
- dict,
-])
-def test_update_database_ddl_rest_call_success(request_type):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.update_database_ddl(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_database_ddl_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_database_ddl_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_database_ddl") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_database_admin.UpdateDatabaseDdlRequest.pb(spanner_database_admin.UpdateDatabaseDdlRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_database_admin.UpdateDatabaseDdlRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.update_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_drop_database_rest_bad_request(request_type=spanner_database_admin.DropDatabaseRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
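- # `DropDatabase` maps to `google.protobuf.Empty`, so the success test
- # below expects `None` back and the interceptor test wires up only the
- # `pre_drop_database` hook; there is no response for a post hook to see.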
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.drop_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.DropDatabaseRequest, - dict, -]) -def test_drop_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.drop_database(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_drop_database_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_drop_database") as pre: - pre.assert_not_called() - pb_message = spanner_database_admin.DropDatabaseRequest.pb(spanner_database_admin.DropDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner_database_admin.DropDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.drop_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_database_ddl_rest_bad_request(request_type=spanner_database_admin.GetDatabaseDdlRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_database_ddl(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.GetDatabaseDdlRequest, - dict, -]) -def test_get_database_ddl_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.GetDatabaseDdlResponse( - statements=['statements_value'], - proto_descriptors=b'proto_descriptors_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.GetDatabaseDdlResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_database_ddl(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_database_admin.GetDatabaseDdlResponse) - assert response.statements == ['statements_value'] - assert response.proto_descriptors == b'proto_descriptors_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_ddl_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_database_ddl_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_database_ddl") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.GetDatabaseDdlRequest.pb(spanner_database_admin.GetDatabaseDdlRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.GetDatabaseDdlResponse.to_json(spanner_database_admin.GetDatabaseDdlResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.GetDatabaseDdlRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.GetDatabaseDdlResponse() - post_with_metadata.return_value = spanner_database_admin.GetDatabaseDdlResponse(), metadata - - client.get_database_ddl(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_set_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.SetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
- post_with_metadata.assert_called_once()
-
-
-def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.get_iam_policy(request)
-
-
-@pytest.mark.parametrize("request_type", [
- iam_policy_pb2.GetIamPolicyRequest,
- dict,
-])
-def test_get_iam_policy_rest_call_success(request_type):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.get_iam_policy(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_iam_policy_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.GetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.GetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
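- # The IAM types are plain protobuf (not proto-plus) messages, so the
- # response is serialized with `json_format.MessageToJson` directly, with
- # no intermediate `.pb()` conversion.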
- return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_test_iam_permissions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) - req.return_value.content = return_value - - request = iam_policy_pb2.TestIamPermissionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata - - client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_rest_bad_request(request_type=gsad_backup.CreateBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup.CreateBackupRequest, - dict, -]) -def test_create_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'name_value', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup.CreateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del request_init["backup"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_backup_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = gsad_backup.CreateBackupRequest.pb(gsad_backup.CreateBackupRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = gsad_backup.CreateBackupRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.create_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_copy_backup_rest_bad_request(request_type=backup.CopyBackupRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.copy_backup(request)
-
-
-@pytest.mark.parametrize("request_type", [
- backup.CopyBackupRequest,
- dict,
-])
-def test_copy_backup_rest_call_success(request_type):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.copy_backup(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_copy_backup_rest_interceptors(null_interceptor):
- transport = transports.DatabaseAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
- )
- client = DatabaseAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup") as post, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_copy_backup_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_copy_backup") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = backup.CopyBackupRequest.pb(backup.CopyBackupRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = backup.CopyBackupRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.copy_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_get_backup_rest_bad_request(request_type=backup.GetBackupRequest):
- client = DatabaseAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(request) - - -@pytest.mark.parametrize("request_type", [ - backup.GetBackupRequest, - dict, -]) -def test_get_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup.GetBackupRequest.pb(backup.GetBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.Backup.to_json(backup.Backup()) - req.return_value.content = return_value - - request = backup.GetBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.Backup() - post_with_metadata.return_value = backup.Backup(), metadata - - client.get_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_backup_rest_bad_request(request_type=gsad_backup.UpdateBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup.UpdateBackupRequest, - dict, -]) -def test_update_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'backup': {'name': 'projects/sample1/instances/sample2/backups/sample3'}} - request_init["backup"] = {'database': 'database_value', 'version_time': {'seconds': 751, 'nanos': 543}, 'expire_time': {}, 'name': 'projects/sample1/instances/sample2/backups/sample3', 'create_time': {}, 'size_bytes': 1089, 'freeable_size_bytes': 2006, 'exclusive_size_bytes': 2168, 'state': 1, 'referencing_databases': ['referencing_databases_value1', 'referencing_databases_value2'], 'encryption_info': {'encryption_type': 1, 'encryption_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'kms_key_version': 'kms_key_version_value'}, 'encryption_information': {}, 'database_dialect': 1, 'referencing_backups': ['referencing_backups_value1', 'referencing_backups_value2'], 'max_expire_time': {}, 'backup_schedules': ['backup_schedules_value1', 'backup_schedules_value2'], 'incremental_backup_chain_id': 'incremental_backup_chain_id_value', 'oldest_version_time': {}, 'instance_partitions': [{'instance_partition': 'instance_partition_value'}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup.UpdateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del request_init["backup"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup.Backup( - database='database_value', - name='name_value', - size_bytes=1089, - freeable_size_bytes=2006, - exclusive_size_bytes=2168, - state=gsad_backup.Backup.State.CREATING, - referencing_databases=['referencing_databases_value'], - database_dialect=common.DatabaseDialect.GOOGLE_STANDARD_SQL, - referencing_backups=['referencing_backups_value'], - backup_schedules=['backup_schedules_value'], - incremental_backup_chain_id='incremental_backup_chain_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gsad_backup.Backup) - assert response.database == 'database_value' - assert response.name == 'name_value' - assert response.size_bytes == 1089 - assert response.freeable_size_bytes == 2006 - assert response.exclusive_size_bytes == 2168 - assert response.state == gsad_backup.Backup.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.database_dialect == common.DatabaseDialect.GOOGLE_STANDARD_SQL - assert response.referencing_backups == ['referencing_backups_value'] - assert response.backup_schedules == ['backup_schedules_value'] - assert response.incremental_backup_chain_id == 'incremental_backup_chain_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gsad_backup.UpdateBackupRequest.pb(gsad_backup.UpdateBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gsad_backup.Backup.to_json(gsad_backup.Backup()) - req.return_value.content = return_value - - request = gsad_backup.UpdateBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gsad_backup.Backup() - post_with_metadata.return_value = gsad_backup.Backup(), metadata - - client.update_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_rest_bad_request(request_type=backup.DeleteBackupRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
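-    # (`Session` here is `requests.Session`, the HTTP layer beneath the REST
-    # transport; google.api_core maps the mocked 400 status to
-    # `core_exceptions.BadRequest`.)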
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(request) - - -@pytest.mark.parametrize("request_type", [ - backup.DeleteBackupRequest, - dict, -]) -def test_delete_backup_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/backups/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup") as pre: - pre.assert_not_called() - pb_message = backup.DeleteBackupRequest.pb(backup.DeleteBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = backup.DeleteBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_list_backups_rest_bad_request(request_type=backup.ListBackupsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
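-    # (the error-mapping path reads the response body as JSON, hence the
-    # stubbed `.json` returning an empty dict.)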
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(request) - - -@pytest.mark.parametrize("request_type", [ - backup.ListBackupsRequest, - dict, -]) -def test_list_backups_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backups_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backups") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup.ListBackupsRequest.pb(backup.ListBackupsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.ListBackupsResponse.to_json(backup.ListBackupsResponse()) - req.return_value.content = return_value - - request = backup.ListBackupsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.ListBackupsResponse() - post_with_metadata.return_value = backup.ListBackupsResponse(), metadata - - client.list_backups(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_restore_database_rest_bad_request(request_type=spanner_database_admin.RestoreDatabaseRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.restore_database(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.RestoreDatabaseRequest, - dict, -]) -def test_restore_database_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restore_database(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, operation.Operation)
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_restore_database_rest_interceptors(null_interceptor):
-    transport = transports.DatabaseAdminRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(),
-        )
-    client = DatabaseAdminClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database") as post, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_restore_database_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_restore_database") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = spanner_database_admin.RestoreDatabaseRequest.pb(spanner_database_admin.RestoreDatabaseRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = spanner_database_admin.RestoreDatabaseRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.restore_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_database_operations_rest_bad_request(request_type=spanner_database_admin.ListDatabaseOperationsRequest):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_database_operations(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_database_admin.ListDatabaseOperationsRequest,
-    dict,
-])
-def test_list_database_operations_rest_call_success(request_type):
-    client = DatabaseAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/instances/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
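-    # (patching `request` on the session's *type*, rather than the instance,
-    # keeps the mock in effect for the session the transport already created.)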
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseOperationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_database_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabaseOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_database_operations_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_operations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_operations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.ListDatabaseOperationsRequest.pb(spanner_database_admin.ListDatabaseOperationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.ListDatabaseOperationsResponse.to_json(spanner_database_admin.ListDatabaseOperationsResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.ListDatabaseOperationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.ListDatabaseOperationsResponse() - post_with_metadata.return_value = spanner_database_admin.ListDatabaseOperationsResponse(), metadata - - client.list_database_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backup_operations_rest_bad_request(request_type=backup.ListBackupOperationsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
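-    # (`pytest.raises` shares this `with` block, so the client call below must
-    # be the statement that raises.)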
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_operations(request) - - -@pytest.mark.parametrize("request_type", [ - backup.ListBackupOperationsRequest, - dict, -]) -def test_list_backup_operations_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup.ListBackupOperationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup.ListBackupOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_operations(request) - - # Establish that the response is the type that we expect. 
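-    # (list responses are wrapped in a pager, which lazily fetches further
-    # pages through the same mocked transport.)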
- assert isinstance(response, pagers.ListBackupOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_operations_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_operations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_operations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup.ListBackupOperationsRequest.pb(backup.ListBackupOperationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup.ListBackupOperationsResponse.to_json(backup.ListBackupOperationsResponse()) - req.return_value.content = return_value - - request = backup.ListBackupOperationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup.ListBackupOperationsResponse() - post_with_metadata.return_value = backup.ListBackupOperationsResponse(), metadata - - client.list_backup_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_database_roles_rest_bad_request(request_type=spanner_database_admin.ListDatabaseRolesRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
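-    # (the resource path in `request_init` above matches the REST URI template,
-    # so transcoding succeeds and the mocked HTTP call is actually reached.)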
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_database_roles(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.ListDatabaseRolesRequest, - dict, -]) -def test_list_database_roles_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.ListDatabaseRolesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.ListDatabaseRolesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_database_roles(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabaseRolesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_database_roles_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_database_roles_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_database_roles") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.ListDatabaseRolesRequest.pb(spanner_database_admin.ListDatabaseRolesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.ListDatabaseRolesResponse.to_json(spanner_database_admin.ListDatabaseRolesResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.ListDatabaseRolesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.ListDatabaseRolesResponse() - post_with_metadata.return_value = spanner_database_admin.ListDatabaseRolesResponse(), metadata - - client.list_database_roles(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_add_split_points_rest_bad_request(request_type=spanner_database_admin.AddSplitPointsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.add_split_points(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_database_admin.AddSplitPointsRequest, - dict, -]) -def test_add_split_points_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_database_admin.AddSplitPointsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_database_admin.AddSplitPointsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.add_split_points(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_database_admin.AddSplitPointsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_add_split_points_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_add_split_points_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_add_split_points") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_database_admin.AddSplitPointsRequest.pb(spanner_database_admin.AddSplitPointsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_database_admin.AddSplitPointsResponse.to_json(spanner_database_admin.AddSplitPointsResponse()) - req.return_value.content = return_value - - request = spanner_database_admin.AddSplitPointsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_database_admin.AddSplitPointsResponse() - post_with_metadata.return_value = spanner_database_admin.AddSplitPointsResponse(), metadata - - client.add_split_points(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.CreateBackupScheduleRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_schedule(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.CreateBackupScheduleRequest, - dict, -]) -def test_create_backup_schedule_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request_init["backup_schedule"] = {'name': 'name_value', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup_schedule.CreateBackupScheduleRequest.meta.fields["backup_schedule"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gsad_backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_create_backup_schedule_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_create_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gsad_backup_schedule.CreateBackupScheduleRequest.pb(gsad_backup_schedule.CreateBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule()) - req.return_value.content = return_value - - request = gsad_backup_schedule.CreateBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gsad_backup_schedule.BackupSchedule() - post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata - - client.create_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_schedule_rest_bad_request(request_type=backup_schedule.GetBackupScheduleRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_schedule(request) - - -@pytest.mark.parametrize("request_type", [ - backup_schedule.GetBackupScheduleRequest, - dict, -]) -def test_get_backup_schedule_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup_schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_get_backup_schedule_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_get_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup_schedule.GetBackupScheduleRequest.pb(backup_schedule.GetBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup_schedule.BackupSchedule.to_json(backup_schedule.BackupSchedule()) - req.return_value.content = return_value - - request = backup_schedule.GetBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup_schedule.BackupSchedule() - post_with_metadata.return_value = backup_schedule.BackupSchedule(), metadata - - client.get_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_backup_schedule_rest_bad_request(request_type=gsad_backup_schedule.UpdateBackupScheduleRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_schedule(request) - - -@pytest.mark.parametrize("request_type", [ - gsad_backup_schedule.UpdateBackupScheduleRequest, - dict, -]) -def test_update_backup_schedule_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'backup_schedule': {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'}} - request_init["backup_schedule"] = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4', 'spec': {'cron_spec': {'text': 'text_value', 'time_zone': 'time_zone_value', 'creation_window': {'seconds': 751, 'nanos': 543}}}, 'retention_duration': {}, 'encryption_config': {'encryption_type': 1, 'kms_key_name': 'kms_key_name_value', 'kms_key_names': ['kms_key_names_value1', 'kms_key_names_value2']}, 'full_backup_spec': {}, 'incremental_backup_spec': {}, 'update_time': {'seconds': 751, 'nanos': 543}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gsad_backup_schedule.UpdateBackupScheduleRequest.meta.fields["backup_schedule"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_schedule"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_schedule"][field])): - del request_init["backup_schedule"][field][i][subfield] - else: - del request_init["backup_schedule"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gsad_backup_schedule.BackupSchedule( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gsad_backup_schedule.BackupSchedule.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_schedule(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gsad_backup_schedule.BackupSchedule) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_update_backup_schedule_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_update_backup_schedule") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gsad_backup_schedule.UpdateBackupScheduleRequest.pb(gsad_backup_schedule.UpdateBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gsad_backup_schedule.BackupSchedule.to_json(gsad_backup_schedule.BackupSchedule()) - req.return_value.content = return_value - - request = gsad_backup_schedule.UpdateBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gsad_backup_schedule.BackupSchedule() - post_with_metadata.return_value = gsad_backup_schedule.BackupSchedule(), metadata - - client.update_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_schedule_rest_bad_request(request_type=backup_schedule.DeleteBackupScheduleRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
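-    # (delete RPCs return `google.protobuf.Empty`, which the client surfaces as
-    # `None`; the success test below asserts exactly that.)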
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_schedule(request) - - -@pytest.mark.parametrize("request_type", [ - backup_schedule.DeleteBackupScheduleRequest, - dict, -]) -def test_delete_backup_schedule_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/backupSchedules/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_schedule(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_schedule_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_delete_backup_schedule") as pre: - pre.assert_not_called() - pb_message = backup_schedule.DeleteBackupScheduleRequest.pb(backup_schedule.DeleteBackupScheduleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = backup_schedule.DeleteBackupScheduleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_backup_schedule(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_list_backup_schedules_rest_bad_request(request_type=backup_schedule.ListBackupSchedulesRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_schedules(request) - - -@pytest.mark.parametrize("request_type", [ - backup_schedule.ListBackupSchedulesRequest, - dict, -]) -def test_list_backup_schedules_rest_call_success(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2/databases/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backup_schedule.ListBackupSchedulesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backup_schedule.ListBackupSchedulesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_schedules(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupSchedulesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_schedules_rest_interceptors(null_interceptor): - transport = transports.DatabaseAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DatabaseAdminRestInterceptor(), - ) - client = DatabaseAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules") as post, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "post_list_backup_schedules_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DatabaseAdminRestInterceptor, "pre_list_backup_schedules") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backup_schedule.ListBackupSchedulesRequest.pb(backup_schedule.ListBackupSchedulesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backup_schedule.ListBackupSchedulesResponse.to_json(backup_schedule.ListBackupSchedulesResponse()) - req.return_value.content = return_value - - request = backup_schedule.ListBackupSchedulesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backup_schedule.ListBackupSchedulesResponse() - post_with_metadata.return_value = backup_schedule.ListBackupSchedulesResponse(), metadata - - client.list_backup_schedules(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_internal_update_graph_operation_rest_error(): - - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - with pytest.raises(NotImplementedError) as not_implemented_error: - client.internal_update_graph_operation({}) - assert ( - "Method InternalUpdateGraphOperation is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
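-    # Note: DeleteOperation maps to google.protobuf.Empty on the wire, which
-    # the Python surface exposes as a plain None.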
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - client.update_database(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_ddl_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database_ddl), - '__call__') as call: - client.update_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.UpdateDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_drop_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.drop_database), - '__call__') as call: - client.drop_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.DropDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_ddl_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database_ddl), - '__call__') as call: - client.get_database_ddl(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.GetDatabaseDdlRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup), - '__call__') as call: - client.create_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.CreateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_copy_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.copy_backup), - '__call__') as call: - client.copy_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.CopyBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - client.update_backup(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_database_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_database), - '__call__') as call: - client.restore_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.RestoreDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_operations_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_operations), - '__call__') as call: - client.list_database_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_operations_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_operations), - '__call__') as call: - client.list_backup_operations(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup.ListBackupOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_database_roles_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_database_roles), - '__call__') as call: - client.list_database_roles(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.ListDatabaseRolesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_add_split_points_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.add_split_points), - '__call__') as call: - client.add_split_points(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.AddSplitPointsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_schedule_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_schedule), - '__call__') as call: - client.create_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.CreateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_schedule_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_schedule), - '__call__') as call: - client.get_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.GetBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_schedule_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_schedule), - '__call__') as call: - client.update_backup_schedule(request=None) - - # Establish that the underlying stub method was called. 
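-        # Note: with request=None the client is expected to synthesize a
-        # default, empty request message of the matching type -- exactly what
-        # the assertions below pin down.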
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gsad_backup_schedule.UpdateBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_schedule_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_schedule), - '__call__') as call: - client.delete_backup_schedule(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.DeleteBackupScheduleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_schedules_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_schedules), - '__call__') as call: - client.list_backup_schedules(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backup_schedule.ListBackupSchedulesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_internal_update_graph_operation_empty_call_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.internal_update_graph_operation), - '__call__') as call: - client.internal_update_graph_operation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_database_admin.InternalUpdateGraphOperationRequest() - - assert args[0] == request_msg - - -def test_database_admin_rest_lro_client(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DatabaseAdminGrpcTransport, - ) - -def test_database_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DatabaseAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_database_admin_base_transport(): - # Instantiate the base transport. 
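-    # Note: the abstract base transport stubs every RPC to raise
-    # NotImplementedError; the concrete gRPC/REST transports override each
-    # method, which is what the loop below verifies.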
- with mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DatabaseAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_databases', - 'create_database', - 'get_database', - 'update_database', - 'update_database_ddl', - 'drop_database', - 'get_database_ddl', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'create_backup', - 'copy_backup', - 'get_backup', - 'update_backup', - 'delete_backup', - 'list_backups', - 'restore_database', - 'list_database_operations', - 'list_backup_operations', - 'list_database_roles', - 'add_split_points', - 'create_backup_schedule', - 'get_backup_schedule', - 'update_backup_schedule', - 'delete_backup_schedule', - 'list_backup_schedules', - 'internal_update_graph_operation', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_database_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatabaseAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id="octopus", - ) - - -def test_database_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_database_v1.services.database_admin.transports.DatabaseAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DatabaseAdminTransport() - adc.assert_called_once() - - -def test_database_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
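-    # Note: ADC resolution is delegated to google.auth.default(), which
-    # typically consults GOOGLE_APPLICATION_CREDENTIALS, then locally stored
-    # gcloud credentials, then the metadata server; here it is mocked out.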
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DatabaseAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - ], -) -def test_database_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DatabaseAdminGrpcTransport, - transports.DatabaseAdminGrpcAsyncIOTransport, - transports.DatabaseAdminRestTransport, - ], -) -def test_database_admin_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DatabaseAdminGrpcTransport, grpc_helpers), - (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_database_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
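-    # Note: an explicitly supplied ssl_channel_credentials object takes
-    # precedence over the client_cert_source_for_mtls callback; the callback
-    # path is only exercised in the second block of this test.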
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_database_admin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DatabaseAdminRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_database_admin_host_no_port(transport_name): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_database_admin_host_with_port(transport_name): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_database_admin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DatabaseAdminClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DatabaseAdminClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_databases._session - session2 = client2.transport.list_databases._session - assert session1 != session2 - session1 = client1.transport.create_database._session - session2 = client2.transport.create_database._session - assert session1 != session2 - session1 = client1.transport.get_database._session - session2 = client2.transport.get_database._session - assert session1 != session2 - session1 = client1.transport.update_database._session - session2 = client2.transport.update_database._session - assert session1 != session2 - session1 = 
client1.transport.update_database_ddl._session - session2 = client2.transport.update_database_ddl._session - assert session1 != session2 - session1 = client1.transport.drop_database._session - session2 = client2.transport.drop_database._session - assert session1 != session2 - session1 = client1.transport.get_database_ddl._session - session2 = client2.transport.get_database_ddl._session - assert session1 != session2 - session1 = client1.transport.set_iam_policy._session - session2 = client2.transport.set_iam_policy._session - assert session1 != session2 - session1 = client1.transport.get_iam_policy._session - session2 = client2.transport.get_iam_policy._session - assert session1 != session2 - session1 = client1.transport.test_iam_permissions._session - session2 = client2.transport.test_iam_permissions._session - assert session1 != session2 - session1 = client1.transport.create_backup._session - session2 = client2.transport.create_backup._session - assert session1 != session2 - session1 = client1.transport.copy_backup._session - session2 = client2.transport.copy_backup._session - assert session1 != session2 - session1 = client1.transport.get_backup._session - session2 = client2.transport.get_backup._session - assert session1 != session2 - session1 = client1.transport.update_backup._session - session2 = client2.transport.update_backup._session - assert session1 != session2 - session1 = client1.transport.delete_backup._session - session2 = client2.transport.delete_backup._session - assert session1 != session2 - session1 = client1.transport.list_backups._session - session2 = client2.transport.list_backups._session - assert session1 != session2 - session1 = client1.transport.restore_database._session - session2 = client2.transport.restore_database._session - assert session1 != session2 - session1 = client1.transport.list_database_operations._session - session2 = client2.transport.list_database_operations._session - assert session1 != session2 - session1 = client1.transport.list_backup_operations._session - session2 = client2.transport.list_backup_operations._session - assert session1 != session2 - session1 = client1.transport.list_database_roles._session - session2 = client2.transport.list_database_roles._session - assert session1 != session2 - session1 = client1.transport.add_split_points._session - session2 = client2.transport.add_split_points._session - assert session1 != session2 - session1 = client1.transport.create_backup_schedule._session - session2 = client2.transport.create_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.get_backup_schedule._session - session2 = client2.transport.get_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.update_backup_schedule._session - session2 = client2.transport.update_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.delete_backup_schedule._session - session2 = client2.transport.delete_backup_schedule._session - assert session1 != session2 - session1 = client1.transport.list_backup_schedules._session - session2 = client2.transport.list_backup_schedules._session - assert session1 != session2 - session1 = client1.transport.internal_update_graph_operation._session - session2 = client2.transport.internal_update_graph_operation._session - assert session1 != session2 -def test_database_admin_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
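-    # Note: a caller-supplied channel is adopted as-is -- the transport skips
-    # its own channel creation, so no TLS configuration is recorded, hence
-    # the _ssl_channel_credentials assertion below.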
-    transport = transports.DatabaseAdminGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_database_admin_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DatabaseAdminGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport])
-def test_database_admin_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DatabaseAdminGrpcTransport, transports.DatabaseAdminGrpcAsyncIOTransport]) -def test_database_admin_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_database_admin_grpc_lro_client(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_database_admin_grpc_lro_async_client(): - client = DatabaseAdminAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_backup_path(): - project = "squid" - instance = "clam" - backup = "whelk" - expected = "projects/{project}/instances/{instance}/backups/{backup}".format(project=project, instance=instance, backup=backup, ) - actual = DatabaseAdminClient.backup_path(project, instance, backup) - assert expected == actual - - -def test_parse_backup_path(): - expected = { - "project": "octopus", - "instance": "oyster", - "backup": "nudibranch", - } - path = DatabaseAdminClient.backup_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_backup_path(path) - assert expected == actual - -def test_backup_schedule_path(): - project = "cuttlefish" - instance = "mussel" - database = "winkle" - schedule = "nautilus" - expected = "projects/{project}/instances/{instance}/databases/{database}/backupSchedules/{schedule}".format(project=project, instance=instance, database=database, schedule=schedule, ) - actual = DatabaseAdminClient.backup_schedule_path(project, instance, database, schedule) - assert expected == actual - - -def test_parse_backup_schedule_path(): - expected = { - "project": "scallop", - "instance": "abalone", - "database": "squid", - "schedule": "clam", - } - path = DatabaseAdminClient.backup_schedule_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_backup_schedule_path(path) - assert expected == actual - -def test_crypto_key_path(): - project = "whelk" - location = "octopus" - key_ring = "oyster" - crypto_key = "nudibranch" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - actual = DatabaseAdminClient.crypto_key_path(project, location, key_ring, crypto_key) - assert expected == actual - - -def test_parse_crypto_key_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - "key_ring": "winkle", - "crypto_key": "nautilus", - } - path = DatabaseAdminClient.crypto_key_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_crypto_key_path(path) - assert expected == actual - -def test_crypto_key_version_path(): - project = "scallop" - location = "abalone" - key_ring = "squid" - crypto_key = "clam" - crypto_key_version = "whelk" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, crypto_key_version=crypto_key_version, ) - actual = DatabaseAdminClient.crypto_key_version_path(project, location, key_ring, crypto_key, crypto_key_version) - assert expected == actual - - -def test_parse_crypto_key_version_path(): - expected = { - "project": "octopus", - "location": "oyster", - "key_ring": "nudibranch", - "crypto_key": "cuttlefish", - "crypto_key_version": "mussel", - } - path = DatabaseAdminClient.crypto_key_version_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_crypto_key_version_path(path) - assert expected == actual - -def test_database_path(): - project = "winkle" - instance = "nautilus" - database = "scallop" - expected = "projects/{project}/instances/{instance}/databases/{database}".format(project=project, instance=instance, database=database, ) - actual = DatabaseAdminClient.database_path(project, instance, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "abalone", - "instance": "squid", - "database": "clam", - } - path = DatabaseAdminClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_database_path(path) - assert expected == actual - -def test_database_role_path(): - project = "whelk" - instance = "octopus" - database = "oyster" - role = "nudibranch" - expected = "projects/{project}/instances/{instance}/databases/{database}/databaseRoles/{role}".format(project=project, instance=instance, database=database, role=role, ) - actual = DatabaseAdminClient.database_role_path(project, instance, database, role) - assert expected == actual - - -def test_parse_database_role_path(): - expected = { - "project": "cuttlefish", - "instance": "mussel", - "database": "winkle", - "role": "nautilus", - } - path = DatabaseAdminClient.database_role_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_database_role_path(path) - assert expected == actual - -def test_instance_path(): - project = "scallop" - instance = "abalone" - expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - actual = DatabaseAdminClient.instance_path(project, instance) - assert expected == actual - - -def test_parse_instance_path(): - expected = { - "project": "squid", - "instance": "clam", - } - path = DatabaseAdminClient.instance_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_instance_path(path) - assert expected == actual - -def test_instance_partition_path(): - project = "whelk" - instance = "octopus" - instance_partition = "oyster" - expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) - actual = DatabaseAdminClient.instance_partition_path(project, instance, instance_partition) - assert expected == actual - - -def test_parse_instance_partition_path(): - expected = { - "project": "nudibranch", - "instance": "cuttlefish", - "instance_partition": "mussel", - } - path = DatabaseAdminClient.instance_partition_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_instance_partition_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DatabaseAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = DatabaseAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = DatabaseAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = DatabaseAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DatabaseAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = DatabaseAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = DatabaseAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = DatabaseAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = DatabaseAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DatabaseAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = DatabaseAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DatabaseAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DatabaseAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = DatabaseAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
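-    # Note: URI-bound fields travel in the x-goog-request-params request
-    # metadata, e.g. "name=locations", which the final assertion of this
-    # test checks.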
- request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DatabaseAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DatabaseAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DatabaseAdminClient, transports.DatabaseAdminGrpcTransport), - (DatabaseAdminAsyncClient, transports.DatabaseAdminGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc b/owl-bot-staging/spanner_admin_instance/v1/.coveragerc deleted file mode 100644 index 040d100fc8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/spanner_admin_instance/__init__.py - google/cloud/spanner_admin_instance/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/spanner_admin_instance/v1/.flake8 b/owl-bot-staging/spanner_admin_instance/v1/.flake8 deleted file mode 100644 index 90316de214..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/.flake8 +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the 
"License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -[flake8] -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): -# Resolve flake8 lint issues -ignore = E203, E231, E266, E501, W503 -exclude = - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): - # Ensure that generated code passes flake8 lint - **/gapic/** - **/services/** - **/types/** - # Exclude Protobuf gencode - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/spanner_admin_instance/v1/LICENSE b/owl-bot-staging/spanner_admin_instance/v1/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in b/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in deleted file mode 100644 index dae249ec89..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/MANIFEST.in +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-include README.rst LICENSE
-recursive-include google *.py *.pyi *.json *.proto py.typed
-recursive-include tests *
-global-exclude *.py[co]
-global-exclude __pycache__
diff --git a/owl-bot-staging/spanner_admin_instance/v1/README.rst b/owl-bot-staging/spanner_admin_instance/v1/README.rst
deleted file mode 100644
index 7123886f79..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Spanner Admin Instance API
-=========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Spanner Admin Instance API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
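The propagation behavior in "Logging details" above is the one point without a worked example, so here is a minimal sketch that combines it with the code-based handler configuration. Only the documented :code:`google` logger scope and the :code:`propagate` flag come from the README text; the handler and level choices are illustrative.

.. code-block:: python

    import logging

    # Attach a handler to the documented "google" scope so DEBUG-and-above
    # events from Google client libraries are emitted to stderr.
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # Propagation to the root logger is off by default for this scope;
    # opt in explicitly if root-level handlers should also see the events.
    base_logger.propagate = True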
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css b/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css
deleted file mode 100644
index b0a295464b..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/docs/_static/custom.css
+++ /dev/null
@@ -1,20 +0,0 @@
-div#python2-eol {
-    border-color: red;
-    border-width: medium;
-}
-
-/* Ensure minimum width for 'Parameters' / 'Returns' column */
-dl.field-list > dt {
-    min-width: 100px
-}
-
-/* Insert space between methods for readability */
-dl.method {
-    padding-top: 10px;
-    padding-bottom: 10px
-}
-
-/* Insert empty space between classes */
-dl.class {
-    padding-bottom: 50px
-}
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html b/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html
deleted file mode 100644
index 95e9c77fcf..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/docs/_templates/layout.html
+++ /dev/null
@@ -1,50 +0,0 @@
-
-{% extends "!layout.html" %}
-{%- block content %}
-{%- if theme_fixed_sidebar|lower == 'true' %}
-  <div class="document">
-    {{ sidebar() }}
-    {%- block document %}
-      <div class="documentwrapper">
-      {%- if render_sidebar %}
-        <div class="bodywrapper">
-      {%- endif %}
-
-          {%- block relbar_top %}
-          {%- if theme_show_relbar_top|tobool %}
-          <div class="related top">
-          </div>
-          {%- endif %}
-          {% endblock %}
-
-          <div class="body" role="main">
-            <div class="admonition" id="python2-eol">
-            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
-            Library versions released prior to that date will continue to be available. For more information please
-            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
-            </div>
-            {% block body %} {% endblock %}
-          </div>
-
-          {%- block relbar_bottom %}
-          {%- if theme_show_relbar_bottom|tobool %}
-          <div class="related bottom">
-          </div>
-          {%- endif %}
-          {% endblock %}
-
-      {%- if render_sidebar %}
-        </div>
-      {%- endif %}
-      </div>
-    {%- endblock %}
-  </div>
-{%- else %} -{{ super() }} -{%- endif %} -{%- endblock %} diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py b/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py deleted file mode 100644 index 4e0af4e74e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/conf.py +++ /dev/null @@ -1,385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-spanner-admin-instance documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -# For plugins that can not read conf.py. -# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 -sys.path.insert(0, os.path.abspath(".")) - -__version__ = "" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.5.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "recommonmark", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_options = {"members": True} -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-spanner-admin-instance" -copyright = u"2025, Google, LLC" -author = u"Google APIs" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [ - "_build", - "**/.nox/**/*", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/README.rst", -] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for google-cloud-spanner-admin-instance", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-spanner-admin-instance-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-spanner-admin-instance.tex", - u"google-cloud-spanner-admin-instance Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-spanner-admin-instance", - "google-cloud-spanner-admin-instance Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-spanner-admin-instance", - "google-cloud-spanner-admin-instance Documentation", - author, - "google-cloud-spanner-admin-instance", - "google-cloud-spanner-admin-instance Library", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("https://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ( - "https://googleapis.dev/python/google-api-core/latest/", - None, - ), - "grpc": ("https://grpc.github.io/grpc/python/", None), - "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst deleted file mode 100644 index 545d74cbb9..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. include:: multiprocessing.rst - - -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - spanner_admin_instance_v1/services_ - spanner_admin_instance_v1/types_ diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst deleted file mode 100644 index 536d17b2ea..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/multiprocessing.rst +++ /dev/null @@ -1,7 +0,0 @@ -.. note:: - - Because this client uses :mod:`grpc` library, it is safe to - share instances across threads. In multiprocessing scenarios, the best - practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.pool.Pool` or - :class:`multiprocessing.Process`. 
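A minimal sketch of the fork-safety guidance in the multiprocessing note above, assuming default application credentials; :code:`InstanceAdminClient` is the client this package exports, while the :code:`projects/my-project` parent resource name is a placeholder.

.. code-block:: python

    import multiprocessing

    from google.cloud import spanner_admin_instance_v1


    def count_instance_configs(parent: str) -> int:
        # Create the client *inside* the worker, i.e. after the fork
        # performed by multiprocessing.Pool, so each process owns its own
        # gRPC channel instead of sharing the parent process's channel.
        client = spanner_admin_instance_v1.InstanceAdminClient()
        return sum(1 for _ in client.list_instance_configs(parent=parent))


    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as pool:
            # "projects/my-project" is a placeholder resource name.
            print(pool.map(count_instance_configs, ["projects/my-project"] * 2))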
diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst deleted file mode 100644 index fe820b3fad..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/instance_admin.rst +++ /dev/null @@ -1,10 +0,0 @@ -InstanceAdmin -------------------------------- - -.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin - :members: - :inherited-members: - -.. automodule:: google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst deleted file mode 100644 index 407d44cc34..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Spanner Admin Instance v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - instance_admin diff --git a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst b/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst deleted file mode 100644 index 250cf6bf9b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/docs/spanner_admin_instance_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Spanner Admin Instance v1 API -==================================================== - -.. automodule:: google.cloud.spanner_admin_instance_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py deleted file mode 100644 index a729ed0a13..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/__init__.py +++ /dev/null @@ -1,109 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.spanner_admin_instance import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.spanner_admin_instance_v1.services.instance_admin.client import InstanceAdminClient -from google.cloud.spanner_admin_instance_v1.services.instance_admin.async_client import InstanceAdminAsyncClient - -from google.cloud.spanner_admin_instance_v1.types.common import OperationProgress -from google.cloud.spanner_admin_instance_v1.types.common import ReplicaSelection -from google.cloud.spanner_admin_instance_v1.types.common import FulfillmentPeriod -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import AutoscalingConfig -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import CreateInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import DeleteInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import FreeInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import GetInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import Instance -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstanceConfig -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import InstancePartition -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigOperationsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstanceConfigsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionOperationsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancePartitionsResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ListInstancesResponse -from 
google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import MoveInstanceResponse -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaComputeCapacity -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import ReplicaInfo -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceConfigRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionMetadata -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstancePartitionRequest -from google.cloud.spanner_admin_instance_v1.types.spanner_instance_admin import UpdateInstanceRequest - -__all__ = ('InstanceAdminClient', - 'InstanceAdminAsyncClient', - 'OperationProgress', - 'ReplicaSelection', - 'FulfillmentPeriod', - 'AutoscalingConfig', - 'CreateInstanceConfigMetadata', - 'CreateInstanceConfigRequest', - 'CreateInstanceMetadata', - 'CreateInstancePartitionMetadata', - 'CreateInstancePartitionRequest', - 'CreateInstanceRequest', - 'DeleteInstanceConfigRequest', - 'DeleteInstancePartitionRequest', - 'DeleteInstanceRequest', - 'FreeInstanceMetadata', - 'GetInstanceConfigRequest', - 'GetInstancePartitionRequest', - 'GetInstanceRequest', - 'Instance', - 'InstanceConfig', - 'InstancePartition', - 'ListInstanceConfigOperationsRequest', - 'ListInstanceConfigOperationsResponse', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'ListInstancePartitionOperationsRequest', - 'ListInstancePartitionOperationsResponse', - 'ListInstancePartitionsRequest', - 'ListInstancePartitionsResponse', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'MoveInstanceMetadata', - 'MoveInstanceRequest', - 'MoveInstanceResponse', - 'ReplicaComputeCapacity', - 'ReplicaInfo', - 'UpdateInstanceConfigMetadata', - 'UpdateInstanceConfigRequest', - 'UpdateInstanceMetadata', - 'UpdateInstancePartitionMetadata', - 'UpdateInstancePartitionRequest', - 'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
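As a usage sketch, the re-exports above mean the unversioned and versioned import paths resolve to the same objects (the project ID below is a hypothetical placeholder):

    from google.cloud import spanner_admin_instance
    from google.cloud import spanner_admin_instance_v1

    # The unversioned package simply re-exports the v1 surface, so both
    # names below refer to the same class object.
    assert (spanner_admin_instance.InstanceAdminClient
            is spanner_admin_instance_v1.InstanceAdminClient)

    # Request types are re-exported the same way.
    request = spanner_admin_instance.ListInstancesRequest(
        parent="projects/my-project",  # hypothetical placeholder project ID
    )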
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed deleted file mode 100644 index 915a8e55e3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py deleted file mode 100644 index 384e545d89..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/__init__.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.instance_admin import InstanceAdminClient -from .services.instance_admin import InstanceAdminAsyncClient - -from .types.common import OperationProgress -from .types.common import ReplicaSelection -from .types.common import FulfillmentPeriod -from .types.spanner_instance_admin import AutoscalingConfig -from .types.spanner_instance_admin import CreateInstanceConfigMetadata -from .types.spanner_instance_admin import CreateInstanceConfigRequest -from .types.spanner_instance_admin import CreateInstanceMetadata -from .types.spanner_instance_admin import CreateInstancePartitionMetadata -from .types.spanner_instance_admin import CreateInstancePartitionRequest -from .types.spanner_instance_admin import CreateInstanceRequest -from .types.spanner_instance_admin import DeleteInstanceConfigRequest -from .types.spanner_instance_admin import DeleteInstancePartitionRequest -from .types.spanner_instance_admin import DeleteInstanceRequest -from .types.spanner_instance_admin import FreeInstanceMetadata -from .types.spanner_instance_admin import GetInstanceConfigRequest -from .types.spanner_instance_admin import GetInstancePartitionRequest -from .types.spanner_instance_admin import GetInstanceRequest -from .types.spanner_instance_admin import Instance -from .types.spanner_instance_admin import InstanceConfig -from .types.spanner_instance_admin import InstancePartition -from .types.spanner_instance_admin import ListInstanceConfigOperationsRequest -from .types.spanner_instance_admin import ListInstanceConfigOperationsResponse -from .types.spanner_instance_admin import ListInstanceConfigsRequest -from .types.spanner_instance_admin import ListInstanceConfigsResponse -from .types.spanner_instance_admin import ListInstancePartitionOperationsRequest -from .types.spanner_instance_admin import ListInstancePartitionOperationsResponse -from .types.spanner_instance_admin import 
ListInstancePartitionsRequest -from .types.spanner_instance_admin import ListInstancePartitionsResponse -from .types.spanner_instance_admin import ListInstancesRequest -from .types.spanner_instance_admin import ListInstancesResponse -from .types.spanner_instance_admin import MoveInstanceMetadata -from .types.spanner_instance_admin import MoveInstanceRequest -from .types.spanner_instance_admin import MoveInstanceResponse -from .types.spanner_instance_admin import ReplicaComputeCapacity -from .types.spanner_instance_admin import ReplicaInfo -from .types.spanner_instance_admin import UpdateInstanceConfigMetadata -from .types.spanner_instance_admin import UpdateInstanceConfigRequest -from .types.spanner_instance_admin import UpdateInstanceMetadata -from .types.spanner_instance_admin import UpdateInstancePartitionMetadata -from .types.spanner_instance_admin import UpdateInstancePartitionRequest -from .types.spanner_instance_admin import UpdateInstanceRequest - -__all__ = ( - 'InstanceAdminAsyncClient', -'AutoscalingConfig', -'CreateInstanceConfigMetadata', -'CreateInstanceConfigRequest', -'CreateInstanceMetadata', -'CreateInstancePartitionMetadata', -'CreateInstancePartitionRequest', -'CreateInstanceRequest', -'DeleteInstanceConfigRequest', -'DeleteInstancePartitionRequest', -'DeleteInstanceRequest', -'FreeInstanceMetadata', -'FulfillmentPeriod', -'GetInstanceConfigRequest', -'GetInstancePartitionRequest', -'GetInstanceRequest', -'Instance', -'InstanceAdminClient', -'InstanceConfig', -'InstancePartition', -'ListInstanceConfigOperationsRequest', -'ListInstanceConfigOperationsResponse', -'ListInstanceConfigsRequest', -'ListInstanceConfigsResponse', -'ListInstancePartitionOperationsRequest', -'ListInstancePartitionOperationsResponse', -'ListInstancePartitionsRequest', -'ListInstancePartitionsResponse', -'ListInstancesRequest', -'ListInstancesResponse', -'MoveInstanceMetadata', -'MoveInstanceRequest', -'MoveInstanceResponse', -'OperationProgress', -'ReplicaComputeCapacity', -'ReplicaInfo', -'ReplicaSelection', -'UpdateInstanceConfigMetadata', -'UpdateInstanceConfigRequest', -'UpdateInstanceMetadata', -'UpdateInstancePartitionMetadata', -'UpdateInstancePartitionRequest', -'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json deleted file mode 100644 index 60fa46718a..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_metadata.json +++ /dev/null @@ -1,343 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.spanner_admin_instance_v1", - "protoPackage": "google.spanner.admin.instance.v1", - "schema": "1.0", - "services": { - "InstanceAdmin": { - "clients": { - "grpc": { - "libraryClient": "InstanceAdminClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "CreateInstanceConfig": { - "methods": [ - "create_instance_config" - ] - }, - "CreateInstancePartition": { - "methods": [ - "create_instance_partition" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "DeleteInstanceConfig": { - "methods": [ - "delete_instance_config" - ] - }, - "DeleteInstancePartition": { - "methods": [ - "delete_instance_partition" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - 
"GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "GetInstancePartition": { - "methods": [ - "get_instance_partition" - ] - }, - "ListInstanceConfigOperations": { - "methods": [ - "list_instance_config_operations" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstancePartitionOperations": { - "methods": [ - "list_instance_partition_operations" - ] - }, - "ListInstancePartitions": { - "methods": [ - "list_instance_partitions" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "MoveInstance": { - "methods": [ - "move_instance" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateInstanceConfig": { - "methods": [ - "update_instance_config" - ] - }, - "UpdateInstancePartition": { - "methods": [ - "update_instance_partition" - ] - } - } - }, - "grpc-async": { - "libraryClient": "InstanceAdminAsyncClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "CreateInstanceConfig": { - "methods": [ - "create_instance_config" - ] - }, - "CreateInstancePartition": { - "methods": [ - "create_instance_partition" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "DeleteInstanceConfig": { - "methods": [ - "delete_instance_config" - ] - }, - "DeleteInstancePartition": { - "methods": [ - "delete_instance_partition" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "GetInstancePartition": { - "methods": [ - "get_instance_partition" - ] - }, - "ListInstanceConfigOperations": { - "methods": [ - "list_instance_config_operations" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstancePartitionOperations": { - "methods": [ - "list_instance_partition_operations" - ] - }, - "ListInstancePartitions": { - "methods": [ - "list_instance_partitions" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "MoveInstance": { - "methods": [ - "move_instance" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateInstanceConfig": { - "methods": [ - "update_instance_config" - ] - }, - "UpdateInstancePartition": { - "methods": [ - "update_instance_partition" - ] - } - } - }, - "rest": { - "libraryClient": "InstanceAdminClient", - "rpcs": { - "CreateInstance": { - "methods": [ - "create_instance" - ] - }, - "CreateInstanceConfig": { - "methods": [ - "create_instance_config" - ] - }, - "CreateInstancePartition": { - "methods": [ - "create_instance_partition" - ] - }, - "DeleteInstance": { - "methods": [ - "delete_instance" - ] - }, - "DeleteInstanceConfig": { - "methods": [ - "delete_instance_config" - ] - }, - "DeleteInstancePartition": { - "methods": [ - "delete_instance_partition" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetInstanceConfig": { - "methods": [ - "get_instance_config" - ] - }, - "GetInstancePartition": { - "methods": [ - "get_instance_partition" - ] - }, - 
"ListInstanceConfigOperations": { - "methods": [ - "list_instance_config_operations" - ] - }, - "ListInstanceConfigs": { - "methods": [ - "list_instance_configs" - ] - }, - "ListInstancePartitionOperations": { - "methods": [ - "list_instance_partition_operations" - ] - }, - "ListInstancePartitions": { - "methods": [ - "list_instance_partitions" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "MoveInstance": { - "methods": [ - "move_instance" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateInstanceConfig": { - "methods": [ - "update_instance_config" - ] - }, - "UpdateInstancePartition": { - "methods": [ - "update_instance_partition" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py deleted file mode 100644 index 20a9cd975b..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed deleted file mode 100644 index 915a8e55e3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-spanner-admin-instance package uses inline types. diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py deleted file mode 100644 index cbf94b283c..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py deleted file mode 100644 index 72ef4ab187..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import InstanceAdminClient -from .async_client import InstanceAdminAsyncClient - -__all__ = ( - 'InstanceAdminClient', - 'InstanceAdminAsyncClient', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py deleted file mode 100644 index de12cfc17e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/async_client.py +++ /dev/null @@ -1,3468 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
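Before the async client body itself, a short usage sketch of the sync/async pair exported above; the resource name is a hypothetical placeholder:

    import asyncio

    from google.cloud import spanner_admin_instance_v1

    async def main() -> None:
        # The async client mirrors the sync client's surface; RPCs are awaited.
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
        instance = await client.get_instance(
            name="projects/my-project/instances/my-instance",  # hypothetical
        )
        print(instance.display_name)

    asyncio.run(main())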
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import uuid - -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport -from .client import InstanceAdminClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class InstanceAdminAsyncClient: - """Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - """ - - _client: InstanceAdminClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
- DEFAULT_ENDPOINT = InstanceAdminClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = InstanceAdminClient._DEFAULT_UNIVERSE - - instance_path = staticmethod(InstanceAdminClient.instance_path) - parse_instance_path = staticmethod(InstanceAdminClient.parse_instance_path) - instance_config_path = staticmethod(InstanceAdminClient.instance_config_path) - parse_instance_config_path = staticmethod(InstanceAdminClient.parse_instance_config_path) - instance_partition_path = staticmethod(InstanceAdminClient.instance_partition_path) - parse_instance_partition_path = staticmethod(InstanceAdminClient.parse_instance_partition_path) - common_billing_account_path = staticmethod(InstanceAdminClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(InstanceAdminClient.parse_common_billing_account_path) - common_folder_path = staticmethod(InstanceAdminClient.common_folder_path) - parse_common_folder_path = staticmethod(InstanceAdminClient.parse_common_folder_path) - common_organization_path = staticmethod(InstanceAdminClient.common_organization_path) - parse_common_organization_path = staticmethod(InstanceAdminClient.parse_common_organization_path) - common_project_path = staticmethod(InstanceAdminClient.common_project_path) - parse_common_project_path = staticmethod(InstanceAdminClient.parse_common_project_path) - common_location_path = staticmethod(InstanceAdminClient.common_location_path) - parse_common_location_path = staticmethod(InstanceAdminClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminAsyncClient: The constructed client. - """ - return InstanceAdminClient.from_service_account_info.__func__(InstanceAdminAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminAsyncClient: The constructed client. - """ - return InstanceAdminClient.from_service_account_file.__func__(InstanceAdminAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return InstanceAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> InstanceAdminTransport: - """Returns the transport used by the client instance. - - Returns: - InstanceAdminTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = InstanceAdminClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the instance admin async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the InstanceAdminTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = InstanceAdminClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.spanner.admin.instance_v1.InstanceAdminAsyncClient`.", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "credentialsType": None, - } - ) - - async def list_instance_configs(self, - request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstanceConfigsAsyncPager: - r"""Lists the supported instance configurations for a - given project. - Returns both Google-managed configurations and - user-managed configurations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_list_instance_configs(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]]): - The request object.
The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - parent (:class:`str`): - Required. The name of the project for which a list of - supported instance configurations is requested. Values - are of the form ``projects/<project>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager: - The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest): - request = spanner_instance_admin.ListInstanceConfigsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstanceConfigsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance_config(self, - request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.InstanceConfig: - r"""Gets information about a particular instance - configuration. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_get_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]]): - The request object. The request for - [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - name (:class:`str`): - Required. The name of the requested instance - configuration. Values are of the form - ``projects/<project>/instanceConfigs/<instance_config>``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstanceConfig: - A possible configuration for a Cloud - Spanner instance. Configurations define - the geographic placement of nodes and - their replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest): - request = spanner_instance_admin.GetInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
- return response - - async def create_instance_config(self, - request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, - instance_config_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an instance configuration and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance configuration. The - instance configuration name is assigned by the caller. If the - named instance configuration already exists, - ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. - - Immediately after the request returns: - - - The instance configuration is readable via the API, with all - requested attributes. The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. Its state is ``CREATING``. - - While the operation is pending: - - - Cancelling the operation renders the instance configuration - immediately unreadable via the API. - - Except for deleting the creating resource, all other attempts - to modify the instance configuration are rejected. - - Upon completion of the returned operation: - - - Instances can be created using the instance configuration. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. Its state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``<instance_config_name>/operations/<operation_id>`` and - can be used to track creation of the instance configuration. The - metadata field type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.create`` - permission on the resource - [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_create_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.CreateInstanceConfigRequest( - parent="parent_value", - instance_config_id="instance_config_id_value", - ) - - # Make the request - operation = client.create_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]]): - The request object. The request for - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
- parent (:class:`str`): - Required. The name of the project in which to create the - instance configuration. Values are of the form - ``projects/<project>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`): - Required. The ``InstanceConfig`` proto of the - configuration to create. ``instance_config.name`` must - be ``<parent>/instanceConfigs/<instance_config_id>``. - ``instance_config.base_config`` must be a Google-managed - configuration name, e.g. <parent>/instanceConfigs/us-east1, - <parent>/instanceConfigs/nam3. - - This corresponds to the ``instance_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_config_id (:class:`str`): - Required. The ID of the instance configuration to - create. Valid identifiers are of the form - ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and - 64 characters in length. The ``custom-`` prefix is - required to avoid name conflicts with Google-managed - configurations. - - This corresponds to the ``instance_config_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations - define the geographic placement of nodes and their - replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, instance_config, instance_config_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest): - request = spanner_instance_admin.CreateInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_config is not None: - request.instance_config = instance_config - if instance_config_id is not None: - request.instance_config_id = instance_config_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstanceConfig, - metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata, - ) - - # Done; return the response. - return response - - async def update_instance_config(self, - request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None, - *, - instance_config: Optional[spanner_instance_admin.InstanceConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an instance configuration. The returned long-running - operation can be used to track the progress of updating the - instance. If the named instance configuration does not exist, - returns ``NOT_FOUND``. - - Only user-managed configurations can be updated. - - Immediately after the request returns: - - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. - - While the operation is pending: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. - The operation is guaranteed to succeed at undoing all changes, - after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance configuration are - rejected. - - Reading the instance configuration via the API continues to - give the pre-request values. - - Upon completion of the returned operation: - - - Creating instances using the instance configuration uses the - new values. - - The new values of the instance configuration are readable via - the API. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. - - The returned long-running operation will have a name of the - format ``<instance_config_name>/operations/<operation_id>`` and - can be used to track the instance configuration modification. - The metadata field type is - [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_update_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.UpdateInstanceConfigRequest( - ) - - # Make the request - operation = client.update_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]]): - The request object. The request for - [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. - instance_config (:class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig`): - Required. The user instance configuration to update, - which must always include the instance configuration - name. Otherwise, only fields mentioned in - [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask] - need be included. To prevent conflicts of concurrent - updates, - [etag][google.spanner.admin.instance.v1.InstanceConfig.etag] - can be used. - - This corresponds to the ``instance_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields in - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - should be updated. The field mask must always be - specified; this prevents any future fields in - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - from being erased accidentally by clients that do not - know about them. Only display_name and labels can be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations - define the geographic placement of nodes and their - replication. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
- flattened_params = [instance_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest): - request = spanner_instance_admin.UpdateInstanceConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance_config is not None: - request.instance_config = instance_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance_config.name", request.instance_config.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstanceConfig, - metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance_config(self, - request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the instance configuration. Deletion is only allowed - when no instances are using the configuration. If any instances - are using the configuration, returns ``FAILED_PRECONDITION``. - - Only user-managed configurations can be deleted. - - Authorization requires ``spanner.instanceConfigs.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_delete_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_config(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]]): - The request object. 
The request for
- [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig].
- name (:class:`str`):
- Required. The name of the instance configuration to be
- deleted. Values are of the form
- ``projects/<project>/instanceConfigs/<instance_config>``
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest):
- request = spanner_instance_admin.DeleteInstanceConfigRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_config]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- async def list_instance_config_operations(self,
- request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListInstanceConfigOperationsAsyncPager:
- r"""Lists the user-managed instance configuration long-running
- operations in the given project. An instance configuration
- operation has a name of the form
- ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
- The long-running operation metadata field type
- ``metadata.type_url`` describes the type of the metadata.
- Operations returned include those that have
- completed/failed/canceled within the last 7 days, and pending
- operations. Operations returned are ordered by
- ``operation.metadata.value.start_time`` in descending order
- starting from the most recently started operation.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- async def sample_list_instance_config_operations():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_instance_config_operations(request=request)
-
- # Handle the response
- async for response in page_result:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]]):
- The request object. The request for
- [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
- parent (:class:`str`):
- Required. The project of the instance configuration
- operations. Values are of the form
- ``projects/<project>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager:
- The response for
- [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest):
- request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_config_operations]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
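- # (This raises if the client's configured universe domain does not match
- # the credentials' universe domain, so a misconfigured client fails fast
- # here instead of surfacing an opaque auth error from the server.)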
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListInstanceConfigOperationsAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_instances(self,
- request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListInstancesAsyncPager:
- r"""Lists all instances in the given project.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- async def sample_list_instances():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.ListInstancesRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_instances(request=request)
-
- # Handle the response
- async for response in page_result:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]]):
- The request object. The request for
- [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
- parent (:class:`str`):
- Required. The name of the project for which a list of
- instances is requested. Values are of the form
- ``projects/<project>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager:
- The response for
- [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
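- # (Illustrative note: `parent` is the flattened convenience form of
- # `request.parent`; the same value is later echoed into the
- # `x-goog-request-params` routing header built by
- # `gapic_v1.routing_header.to_grpc_metadata` so the backend can route the
- # call to the right project.)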
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.ListInstancesRequest):
- request = spanner_instance_admin.ListInstancesRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListInstancesAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_instance_partitions(self,
- request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListInstancePartitionsAsyncPager:
- r"""Lists all instance partitions for the given instance.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- async def sample_list_instance_partitions():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_instance_partitions(request=request)
-
- # Handle the response
- async for response in page_result:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]]):
- The request object. The request for
- [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
- parent (:class:`str`):
- Required. The instance whose instance partitions should
- be listed. Values are of the form
- ``projects/<project>/instances/<instance>``. Use
- ``{instance} = '-'`` to list instance partitions for all
- Instances in a project, e.g.,
- ``projects/myproject/instances/-``.
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager: - The response for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest): - request = spanner_instance_admin.ListInstancePartitionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partitions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancePartitionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance(self, - request: Optional[Union[spanner_instance_admin.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.Instance: - r"""Gets information about a particular instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- async def sample_get_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.GetInstanceRequest(
- name="name_value",
- )
-
- # Make the request
- response = await client.get_instance(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]]):
- The request object. The request for
- [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance].
- name (:class:`str`):
- Required. The name of the requested instance. Values are
- of the form ``projects/<project>/instances/<instance>``.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.spanner_admin_instance_v1.types.Instance:
- An isolated set of Cloud Spanner
- resources on which databases can be
- hosted.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.GetInstanceRequest):
- request = spanner_instance_admin.GetInstanceRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def create_instance(self,
- request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- instance_id: Optional[str] = None,
- instance: Optional[spanner_instance_admin.Instance] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Creates an instance and begins preparing it to begin serving.
- The returned long-running operation can be used to track the
- progress of preparing the new instance. The instance name is
- assigned by the caller. If the named instance already exists,
- ``CreateInstance`` returns ``ALREADY_EXISTS``.
-
- Immediately upon completion of this request:
-
- - The instance is readable via the API, with all requested
- attributes but no allocated resources. Its state is
- ``CREATING``.
-
- Until completion of the returned operation:
-
- - Cancelling the operation renders the instance immediately
- unreadable via the API.
- - The instance can be deleted.
- - All other attempts to modify the instance are rejected.
-
- Upon completion of the returned operation:
-
- - Billing for all successfully-allocated resources begins (some
- types may have lower than the requested levels).
- - Databases can be created in the instance.
- - The instance's allocated resource levels are readable via the
- API.
- - The instance's state becomes ``READY``.
-
- The returned long-running operation will have a name of the
- format ``<instance_name>/operations/<operation_id>`` and can be
- used to track creation of the instance. The metadata field type
- is
- [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata].
- The response field type is
- [Instance][google.spanner.admin.instance.v1.Instance], if
- successful.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- async def sample_create_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- instance = spanner_admin_instance_v1.Instance()
- instance.name = "name_value"
- instance.config = "config_value"
- instance.display_name = "display_name_value"
-
- request = spanner_admin_instance_v1.CreateInstanceRequest(
- parent="parent_value",
- instance_id="instance_id_value",
- instance=instance,
- )
-
- # Make the request
- operation = client.create_instance(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]]):
- The request object. The request for
- [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance].
- parent (:class:`str`):
- Required. The name of the project in which to create the
- instance. Values are of the form ``projects/<project>``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- instance_id (:class:`str`):
- Required. The ID of the instance to create. Valid
- identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]``
- and must be between 2 and 64 characters in length.
-
- This corresponds to the ``instance_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`):
- Required. The instance to create. The name may be
- omitted, but if specified must be
- ``<parent>/instances/<instance_id>``.
-
- This corresponds to the ``instance`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation_async.AsyncOperation:
- An object representing a long-running operation.
-
- The result type for the operation will be
- :class:`google.cloud.spanner_admin_instance_v1.types.Instance`
- An isolated set of Cloud Spanner resources on which
- databases can be hosted.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, instance_id, instance]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.CreateInstanceRequest):
- request = spanner_instance_admin.CreateInstanceRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if instance_id is not None:
- request.instance_id = instance_id
- if instance is not None:
- request.instance = instance
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation_async.from_gapic(
- response,
- self._client._transport.operations_client,
- spanner_instance_admin.Instance,
- metadata_type=spanner_instance_admin.CreateInstanceMetadata,
- )
-
- # Done; return the response.
- return response
-
- async def update_instance(self,
- request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None,
- *,
- instance: Optional[spanner_instance_admin.Instance] = None,
- field_mask: Optional[field_mask_pb2.FieldMask] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Updates an instance, and begins allocating or releasing
- resources as requested. The returned long-running operation can
- be used to track the progress of updating the instance. If the
- named instance does not exist, returns ``NOT_FOUND``.
-
- Immediately upon completion of this request:
-
- - For resource types for which a decrease in the instance's
- allocation has been requested, billing is based on the
- newly-requested level.
-
- Until completion of the returned operation:
-
- - Cancelling the operation sets its metadata's
- [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time],
- and begins restoring resources to their pre-request values.
- The operation is guaranteed to succeed at undoing all resource
- changes, after which point it terminates with a ``CANCELLED``
- status.
- - All other attempts to modify the instance are rejected.
- - Reading the instance via the API continues to give the
- pre-request resource levels.
-
- Upon completion of the returned operation:
-
- - Billing begins for all successfully-allocated resources (some
- types may have lower than the requested levels).
- - All newly-reserved resources are available for serving the
- instance's tables.
- - The instance's new resource levels are readable via the API.
-
- The returned long-running operation will have a name of the
- format ``<instance_name>/operations/<operation_id>`` and can be
- used to track the instance modification. The metadata field type
- is
- [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata].
- The response field type is
- [Instance][google.spanner.admin.instance.v1.Instance], if
- successful.
-
- Authorization requires ``spanner.instances.update`` permission
- on the resource
- [name][google.spanner.admin.instance.v1.Instance.name].
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- async def sample_update_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- instance = spanner_admin_instance_v1.Instance()
- instance.name = "name_value"
- instance.config = "config_value"
- instance.display_name = "display_name_value"
-
- request = spanner_admin_instance_v1.UpdateInstanceRequest(
- instance=instance,
- )
-
- # Make the request
- operation = client.update_instance(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]]):
- The request object.
The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - instance (:class:`google.cloud.spanner_admin_instance_v1.types.Instance`): - Required. The instance to update, which must always - include the instance name. Otherwise, only fields - mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] - should be updated. The field mask must always be - specified; this prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): - request = spanner_instance_admin.UpdateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
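- # `from_gapic` wraps the raw Operation proto in an `AsyncOperation` whose
- # `result()` resolves to the `Instance` message and whose metadata
- # deserializes as `UpdateInstanceMetadata`, matching the types passed below.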
- response = operation_async.from_gapic(
- response,
- self._client._transport.operations_client,
- spanner_instance_admin.Instance,
- metadata_type=spanner_instance_admin.UpdateInstanceMetadata,
- )
-
- # Done; return the response.
- return response
-
- async def delete_instance(self,
- request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Deletes an instance.
-
- Immediately upon completion of the request:
-
- - Billing ceases for all of the instance's reserved resources.
-
- Soon afterward:
-
- - The instance and *all of its databases* immediately and
- irrevocably disappear from the API. All data in the databases
- is permanently deleted.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
-
- async def sample_delete_instance():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- request = spanner_admin_instance_v1.DeleteInstanceRequest(
- name="name_value",
- )
-
- # Make the request
- await client.delete_instance(request=request)
-
- Args:
- request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]]):
- The request object. The request for
- [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance].
- name (:class:`str`):
- Required. The name of the instance to be deleted. Values
- are of the form
- ``projects/<project>/instances/<instance>``
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest):
- request = spanner_instance_admin.DeleteInstanceRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on an instance resource. Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. 
-
- For some types of Google Cloud resources, a binding
- can also specify a condition, which is a logical
- expression that allows access to a resource only if
- the expression evaluates to true. A condition can add
- constraints based on attributes of the request, the
- resource, or both. To learn which resources support
- conditions in their IAM policies, see the [IAM
- documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
-
- **JSON example:**
-
- ::
-
-     {
-       "bindings": [
-         {
-           "role": "roles/resourcemanager.organizationAdmin",
-           "members": [
-             "user:mike@example.com",
-             "group:admins@example.com",
-             "domain:google.com",
-             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-           ]
-         },
-         {
-           "role": "roles/resourcemanager.organizationViewer",
-           "members": [ "user:eve@example.com" ],
-           "condition": {
-             "title": "expirable access",
-             "description": "Does not grant access after Sep 2020",
-             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-           }
-         }
-       ],
-       "etag": "BwWWja0YfJA=",
-       "version": 3
-     }
-
- **YAML example:**
-
- ::
-
-     bindings:
-     - members:
-       - user:mike@example.com
-       - group:admins@example.com
-       - domain:google.com
-       - serviceAccount:my-project-id@appspot.gserviceaccount.com
-       role: roles/resourcemanager.organizationAdmin
-     - members:
-       - user:eve@example.com
-       role: roles/resourcemanager.organizationViewer
-       condition:
-         title: expirable access
-         description: Does not grant access after Sep 2020
-         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-     etag: BwWWja0YfJA=
-     version: 3
-
- For a description of IAM and its features, see the
- [IAM
- documentation](https://cloud.google.com/iam/docs/).
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [resource]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.SetIamPolicyRequest(**request)
- elif not request:
- request = iam_policy_pb2.SetIamPolicyRequest(resource=resource)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("resource", request.resource),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def get_iam_policy(self,
- request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None,
- *,
- resource: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> policy_pb2.Policy:
- r"""Gets the access control policy for an instance resource. Returns
- an empty policy if an instance exists but does not have a policy
- set.
- - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
-
- **JSON example:**
-
- ::
-
-     {
-       "bindings": [
-         {
-           "role": "roles/resourcemanager.organizationAdmin",
-           "members": [
-             "user:mike@example.com",
-             "group:admins@example.com",
-             "domain:google.com",
-             "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-           ]
-         },
-         {
-           "role": "roles/resourcemanager.organizationViewer",
-           "members": [ "user:eve@example.com" ],
-           "condition": {
-             "title": "expirable access",
-             "description": "Does not grant access after Sep 2020",
-             "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-           }
-         }
-       ],
-       "etag": "BwWWja0YfJA=",
-       "version": 3
-     }
-
- **YAML example:**
-
- ::
-
-     bindings:
-     - members:
-       - user:mike@example.com
-       - group:admins@example.com
-       - domain:google.com
-       - serviceAccount:my-project-id@appspot.gserviceaccount.com
-       role: roles/resourcemanager.organizationAdmin
-     - members:
-       - user:eve@example.com
-       role: roles/resourcemanager.organizationViewer
-       condition:
-         title: expirable access
-         description: Does not grant access after Sep 2020
-         expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-     etag: BwWWja0YfJA=
-     version: 3
-
- For a description of IAM and its features, see the
- [IAM
- documentation](https://cloud.google.com/iam/docs/).
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [resource]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.GetIamPolicyRequest(**request)
- elif not request:
- request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("resource", request.resource),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def test_iam_permissions(self,
- request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
- *,
- resource: Optional[str] = None,
- permissions: Optional[MutableSequence[str]] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> iam_policy_pb2.TestIamPermissionsResponse:
- r"""Returns permissions that the caller has on the specified
- instance resource.
-
- Attempting this RPC on a non-existent Cloud Spanner instance
- resource will result in a NOT_FOUND error if the user has
- ``spanner.instances.list`` permission on the containing Google
- Cloud Project. Otherwise returns an empty set of permissions.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import spanner_admin_instance_v1
- from google.iam.v1 import iam_policy_pb2 # type: ignore
-
- async def sample_test_iam_permissions():
- # Create a client
- client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
-
- # Initialize request argument(s)
- request = iam_policy_pb2.TestIamPermissionsRequest(
- resource="resource_value",
- permissions=['permissions_value1', 'permissions_value2'],
- )
-
- # Make the request
- response = await client.test_iam_permissions(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]):
- The request object. Request message for ``TestIamPermissions`` method.
- resource (:class:`str`):
- REQUIRED: The resource for which the
- policy detail is being requested. See
- the operation documentation for the
- appropriate value for this field.
-
- This corresponds to the ``resource`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- permissions (:class:`MutableSequence[str]`):
- The set of permissions to check for the ``resource``.
- Permissions with wildcards (such as '*' or 'storage.*')
- are not allowed. For more information see `IAM
- Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
-
- This corresponds to the ``permissions`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
- Response message for TestIamPermissions method.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [resource, permissions]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.TestIamPermissionsRequest(**request)
- elif not request:
- request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("resource", request.resource),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance_partition(self, - request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.InstancePartition: - r"""Gets information about a particular instance - partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]]): - The request object. The request for - [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. - name (:class:`str`): - Required. The name of the requested instance partition. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstancePartition: - An isolated set of Cloud Spanner - resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
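
A usage sketch of the request/flattened-argument contract the deleted helpers above enforce; the client construction and resource name are illustrative, not taken from this diff:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1
    from google.iam.v1 import iam_policy_pb2  # type: ignore

    async def check_permissions() -> None:
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()
        resource = "projects/my-project/instances/my-instance"  # hypothetical

        # Either pass the flattened fields...
        response = await client.test_iam_permissions(
            resource=resource,
            permissions=["spanner.instances.get"],
        )

        # ...or a fully-formed request object, but never both.
        request = iam_policy_pb2.TestIamPermissionsRequest(
            resource=resource,
            permissions=["spanner.instances.get"],
        )
        response = await client.test_iam_permissions(request=request)

        # Mixing the two styles fails fast, before any RPC is sent.
        try:
            await client.test_iam_permissions(request=request, resource=resource)
        except ValueError:
            pass

        print(list(response.permissions))
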
- if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest): - request = spanner_instance_admin.GetInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_instance_partition(self, - request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, - instance_partition_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an instance partition and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance partition. The - instance partition name is assigned by the caller. If the named - instance partition already exists, ``CreateInstancePartition`` - returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance partition is readable via the API, with all - requested attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance partition - immediately unreadable via the API. - - The instance partition can be deleted. - - All other attempts to modify the instance partition are - rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can start using this instance partition. - - The instance partition's allocated resource levels are - readable via the API. - - The instance partition's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``<instance_partition_name>/operations/<operation_id>`` - and can be used to track creation of the instance partition. The - metadata field type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_create_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstancePartitionRequest( - parent="parent_value", - instance_partition_id="instance_partition_id_value", - instance_partition=instance_partition, - ) - - # Make the request - operation = client.create_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]]): - The request object. The request for - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - parent (:class:`str`): - Required. The name of the instance in which to create - the instance partition. Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`): - Required. The instance partition to create. The - instance_partition.name may be omitted, but if specified - must be - ``<parent>/instancePartitions/<instance_partition_id>``. - - This corresponds to the ``instance_partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_partition_id (:class:`str`): - Required. The ID of the instance partition to create. - Valid identifiers are of the form - ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 - characters in length. - - This corresponds to the ``instance_partition_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
- flattened_params = [parent, instance_partition, instance_partition_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): - request = spanner_instance_admin.CreateInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_partition is not None: - request.instance_partition = instance_partition - if instance_partition_id is not None: - request.instance_partition_id = instance_partition_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, - ) - - # Done; return the response. - return response - - async def delete_instance_partition(self, - request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_partition(request=request) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]]): - The request object. 
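
One way to consume the ``AsyncOperation`` wrapped above, assuming (as in google-api-core's async futures) that ``result()`` is itself awaitable; all resource names here are hypothetical:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    async def create_partition() -> None:
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()

        instance_partition = spanner_admin_instance_v1.InstancePartition(
            node_count=1,
            config="projects/my-project/instanceConfigs/regional-us-central1",  # hypothetical
            display_name="analytics",
        )

        # Awaiting the RPC yields the operation future, not the final resource.
        operation = await client.create_instance_partition(
            parent="projects/my-project/instances/my-instance",  # hypothetical
            instance_partition_id="analytics-partition",         # hypothetical
            instance_partition=instance_partition,
        )

        # result() polls until the LRO finishes and returns the InstancePartition.
        partition = await operation.result()
        print(partition.name, partition.state)
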
The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - name (:class:`str`): - Required. The name of the instance partition to be - deleted. Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): - request = spanner_instance_admin.DeleteInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_instance_partition(self, - request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, - *, - instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, - field_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. 
- - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. - - The returned long-running operation will have a name of the - format ``<instance_partition_name>/operations/<operation_id>`` - and can be used to track the instance partition modification. - The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_update_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( - instance_partition=instance_partition, - ) - - # Make the request - operation = client.update_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]]): - The request object. The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - instance_partition (:class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition`): - Required. The instance partition to update, which must - always include the instance partition name. Otherwise, - only fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] - need be included. - - This corresponds to the ``instance_partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required.
A mask specifying which fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - should be updated. The field mask must always be - specified; this prevents any future fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance_partition, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest): - request = spanner_instance_admin.UpdateInstancePartitionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance_partition is not None: - request.instance_partition = instance_partition - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance_partition.name", request.instance_partition.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata, - ) - - # Done; return the response. 
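
The ``field_mask`` argument documented above is a plain ``google.protobuf.field_mask_pb2.FieldMask``; a minimal sketch of a partial update that only touches ``node_count`` (resource names hypothetical, and the same awaited-``result()`` assumption as earlier applies):

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1
    from google.protobuf import field_mask_pb2  # type: ignore

    async def resize_partition() -> None:
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()

        # Hypothetical fully-qualified partition name; the name must always be set.
        instance_partition = spanner_admin_instance_v1.InstancePartition(
            name="projects/my-project/instances/my-instance/instancePartitions/analytics-partition",
            node_count=3,
        )

        # Only fields named in the mask are written; everything else is preserved.
        operation = await client.update_instance_partition(
            instance_partition=instance_partition,
            field_mask=field_mask_pb2.FieldMask(paths=["node_count"]),
        )
        partition = await operation.result()
        print(partition.node_count)
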
- return response - - async def list_instance_partition_operations(self, - request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancePartitionOperationsAsyncPager: - r"""Lists instance partition long-running operations in the given - instance. An instance partition operation has a name of the form - ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_list_instance_partition_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partition_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]]): - The request object. The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - parent (:class:`str`): - Required. The parent instance of the instance partition - operations. Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager: - The response for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically.
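
A sketch of driving the async pager just described; the parent value is hypothetical, and the items yielded are raw ``google.longrunning`` operations:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    async def show_partition_operations() -> None:
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()

        pager = await client.list_instance_partition_operations(
            parent="projects/my-project/instances/my-instance",  # hypothetical
        )

        # Element-wise iteration; further list calls are issued on demand.
        # (Page-wise iteration is also available via the pager's `pages` property.)
        async for op in pager:
            print(op.name, op.done, op.metadata.type_url)
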
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest): - request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instance_partition_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancePartitionOperationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def move_instance(self, - request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Moves an instance to the target instance configuration. You can - use the returned long-running operation to track the progress of - moving the instance. - - ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance - meets any of the following criteria: - - - Is undergoing a move to a different instance configuration - - Has backups - - Has an ongoing update - - Contains any CMEK-enabled databases - - Is a free trial instance - - While the operation is pending: - - - All other attempts to modify the instance, including changes - to its compute capacity, are rejected. - - - The following database and backup admin operations are - rejected: - - - ``DatabaseAdmin.CreateDatabase`` - - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if - default_leader is specified in the request.) - - ``DatabaseAdmin.RestoreDatabase`` - - ``DatabaseAdmin.CreateBackup`` - - ``DatabaseAdmin.CopyBackup`` - - - Both the source and target instance configurations are subject - to hourly compute and storage charges. - - - The instance might experience higher read-write latencies and - a higher transaction abort rate. However, moving an instance - doesn't cause any downtime. - - The returned long-running operation has a name of the format - ``/operations/`` and can be used to - track the move instance operation. 
The metadata field type is - [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. - Cancellation is not immediate because it involves moving any - data previously moved to the target instance configuration back - to the original instance configuration. You can use this - operation to track the progress of the cancellation. Upon - successful completion of the cancellation, the operation - terminates with ``CANCELLED`` status. - - If not cancelled, upon completion of the returned operation: - - - The instance successfully moves to the target instance - configuration. - - You are billed for compute and storage in target instance - configuration. - - Authorization requires the ``spanner.instances.update`` - permission on the resource - [instance][google.spanner.admin.instance.v1.Instance]. - - For more details, see `Move an - instance <https://cloud.google.com/spanner/docs/move-instance>`__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - async def sample_move_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.MoveInstanceRequest( - name="name_value", - target_config="target_config_value", - ) - - # Make the request - operation = client.move_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]]): - The request object. The request for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.MoveInstanceRequest): - request = spanner_instance_admin.MoveInstanceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling.
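
Since cancellation of a move is best effort and itself takes time, a caller that abandons the move would do so through the returned future; a sketch under the same ``AsyncOperation`` assumptions as earlier (resource names hypothetical):

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    async def start_and_cancel_move() -> None:
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()

        operation = await client.move_instance(
            request=spanner_admin_instance_v1.MoveInstanceRequest(
                name="projects/my-project/instances/my-instance",          # hypothetical
                target_config="projects/my-project/instanceConfigs/nam3",  # hypothetical
            )
        )

        # Best-effort cancel; already-moved data is copied back to the
        # original configuration before the LRO terminates as CANCELLED.
        await operation.cancel()
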
- rpc = self._client._transport._wrapped_methods[self._client._transport.move_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - spanner_instance_admin.MoveInstanceResponse, - metadata_type=spanner_instance_admin.MoveInstanceMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. 
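
These operations mixins work with raw ``google.longrunning`` messages rather than wrapped operation futures; a minimal sketch (the operation name is a hypothetical example of what the admin methods above report):

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1
    from google.longrunning import operations_pb2  # type: ignore

    async def inspect_operation() -> None:
        client = spanner_admin_instance_v1.InstanceAdminAsyncClient()

        # Hypothetical fully-qualified LRO name.
        name = "projects/my-project/instances/my-instance/operations/op123"

        op = await client.get_operation(operations_pb2.GetOperationRequest(name=name))
        print(op.done, op.metadata.type_url)
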
If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "InstanceAdminAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -__all__ = ( - "InstanceAdminAsyncClient", -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py deleted file mode 100644 index 6f504cdc37..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py +++ /dev/null @@ -1,3849 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
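
The ``__aenter__``/``__aexit__`` pair at the end of the async client above means callers can scope the transport's lifetime with ``async with``; a sketch (resource name hypothetical):

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    async def main() -> None:
        # Leaving the block closes the underlying transport and its channel.
        async with spanner_admin_instance_v1.InstanceAdminAsyncClient() as client:
            instance = await client.get_instance(
                name="projects/my-project/instances/my-instance",
            )
            print(instance.display_name)
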
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import uuid -import warnings - -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import InstanceAdminGrpcTransport -from .transports.grpc_asyncio import InstanceAdminGrpcAsyncIOTransport -from .transports.rest import InstanceAdminRestTransport - - -class InstanceAdminClientMeta(type): - """Metaclass for the InstanceAdmin client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] - _transport_registry["grpc"] = InstanceAdminGrpcTransport - _transport_registry["grpc_asyncio"] = InstanceAdminGrpcAsyncIOTransport - _transport_registry["rest"] = InstanceAdminRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[InstanceAdminTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
- return next(iter(cls._transport_registry.values())) - - - class InstanceAdminClient(metaclass=InstanceAdminClientMeta): - """Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "spanner.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "spanner.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - InstanceAdminClient: The constructed client.
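
With the named groups restored in the regex above, ``_get_default_mtls_endpoint`` behaves as follows; a sketch only, since the helper is private API:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    cls = spanner_admin_instance_v1.InstanceAdminClient

    # A plain *.googleapis.com endpoint gains an ".mtls" component...
    assert cls._get_default_mtls_endpoint("spanner.googleapis.com") == "spanner.mtls.googleapis.com"
    # ...sandbox endpoints keep their sandbox component...
    assert cls._get_default_mtls_endpoint("spanner.sandbox.googleapis.com") == "spanner.mtls.sandbox.googleapis.com"
    # ...and endpoints that are already mTLS, or not on a Google domain, pass through.
    assert cls._get_default_mtls_endpoint("spanner.mtls.googleapis.com") == "spanner.mtls.googleapis.com"
    assert cls._get_default_mtls_endpoint("example.com") == "example.com"
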
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> InstanceAdminTransport: - """Returns the transport used by the client instance. - - Returns: - InstanceAdminTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def instance_path(project: str,instance: str,) -> str: - """Returns a fully-qualified instance string.""" - return "projects/{project}/instances/{instance}".format(project=project, instance=instance, ) - - @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: - """Parses a instance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_config_path(project: str,instance_config: str,) -> str: - """Returns a fully-qualified instance_config string.""" - return "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, ) - - @staticmethod - def parse_instance_config_path(path: str) -> Dict[str,str]: - """Parses a instance_config path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instanceConfigs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def instance_partition_path(project: str,instance: str,instance_partition: str,) -> str: - """Returns a fully-qualified instance_partition string.""" - return "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) - - @staticmethod - def parse_instance_partition_path(path: str) -> Dict[str,str]: - """Parses a instance_partition path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)/instancePartitions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def 
parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use.
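
Each ``*_path`` builder and its ``parse_*`` counterpart above are inverses once the regex group names are restored; a round-trip sketch:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    cls = spanner_admin_instance_v1.InstanceAdminClient

    path = cls.instance_partition_path("my-project", "my-instance", "my-partition")
    assert path == "projects/my-project/instances/my-instance/instancePartitions/my-partition"

    segments = cls.parse_instance_partition_path(path)
    assert segments == {
        "project": "my-project",
        "instance": "my-instance",
        "instance_partition": "my-partition",
    }
    # The parsed segments rebuild the original path exactly.
    assert cls.instance_partition_path(**segments) == path
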
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
-            use_cert_flag (bool): A flag indicating whether to use the client certificate.
-
-        Returns:
-            bytes or None: The client cert source to be used by the client.
-        """
-        client_cert_source = None
-        if use_cert_flag:
-            if provided_cert_source:
-                client_cert_source = provided_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-        return client_cert_source
-
-    @staticmethod
-    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
-        """Return the API endpoint used by the client.
-
-        Args:
-            api_override (str): The API endpoint override. If specified, this is always
-                the return value of this function and the other arguments are not used.
-            client_cert_source (bytes): The client certificate source used by the client.
-            universe_domain (str): The universe domain used by the client.
-            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
-                Possible values are "always", "auto", or "never".
-
-        Returns:
-            str: The API endpoint to be used by the client.
-        """
-        if api_override is not None:
-            api_endpoint = api_override
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            _default_universe = InstanceAdminClient._DEFAULT_UNIVERSE
-            if universe_domain != _default_universe:
-                raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.")
-            api_endpoint = InstanceAdminClient.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)
-        return api_endpoint
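Taken together, these helpers implement the resolution order described above: an explicit override wins, then the mTLS decision, then the universe-domain template. A minimal sketch of the observable behaviour, assuming the standard Spanner endpoints and using anonymous credentials so no ADC setup is needed:

.. code-block:: python

    import os

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import spanner_admin_instance_v1

    # Default: GOOGLE_API_USE_MTLS_ENDPOINT="auto" with no client
    # certificate resolves to the regular endpoint.
    os.environ.pop("GOOGLE_API_USE_MTLS_ENDPOINT", None)
    client = spanner_admin_instance_v1.InstanceAdminClient(
        credentials=AnonymousCredentials())
    print(client.api_endpoint)  # spanner.googleapis.com

    # "always" forces the mTLS endpoint (default universe only).
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    client = spanner_admin_instance_v1.InstanceAdminClient(
        credentials=AnonymousCredentials())
    print(client.api_endpoint)  # spanner.mtls.googleapis.com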
-    @staticmethod
-    def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
-        """Return the universe domain used by the client.
-
-        Args:
-            client_universe_domain (Optional[str]): The universe domain configured via the client options.
-            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
-
-        Returns:
-            str: The universe domain to be used by the client.
-
-        Raises:
-            ValueError: If the universe domain is an empty string.
-        """
-        universe_domain = InstanceAdminClient._DEFAULT_UNIVERSE
-        if client_universe_domain is not None:
-            universe_domain = client_universe_domain
-        elif universe_domain_env is not None:
-            universe_domain = universe_domain_env
-        if len(universe_domain.strip()) == 0:
-            raise ValueError("Universe Domain cannot be an empty string.")
-        return universe_domain
-
-    def _validate_universe_domain(self):
-        """Validates client's and credentials' universe domains are consistent.
-
-        Returns:
-            bool: True iff the configured universe domain is valid.
-
-        Raises:
-            ValueError: If the configured universe domain is not valid.
-        """
-
-        # NOTE (b/349488459): universe validation is disabled until further notice.
-        return True
-
-    def _add_cred_info_for_auth_errors(
-        self,
-        error: core_exceptions.GoogleAPICallError
-    ) -> None:
-        """Adds credential info string to error details for 401/403/404 errors.
-
-        Args:
-            error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
-        """
-        if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]:
-            return
-
-        cred = self._transport._credentials
-
-        # get_cred_info is only available in google-auth>=2.35.0
-        if not hasattr(cred, "get_cred_info"):
-            return
-
-        # ignore the type check since pypy test fails when get_cred_info
-        # is not available
-        cred_info = cred.get_cred_info()  # type: ignore
-        if cred_info and hasattr(error._details, "append"):
-            error._details.append(json.dumps(cred_info))
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used by the client instance.
-        """
-        return self._universe_domain
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, InstanceAdminTransport, Callable[..., InstanceAdminTransport]]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the instance admin client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,InstanceAdminTransport,Callable[..., InstanceAdminTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the InstanceAdminTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = InstanceAdminClient._read_environment_variables()
-        self._client_cert_source = InstanceAdminClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = InstanceAdminClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None  # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, InstanceAdminTransport)
-        if transport_provided:
-            # transport is an InstanceAdminTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
-                )
-            self._transport = cast(InstanceAdminTransport, transport)
-            self._api_endpoint = self._transport.host
-
-        self._api_endpoint = (self._api_endpoint or
-            InstanceAdminClient._get_api_endpoint(
-                self._client_options.api_endpoint,
-                self._client_cert_source,
-                self._universe_domain,
-                self._use_mtls_endpoint))
-
-        if not transport_provided:
-            import google.auth._default  # type: ignore
-
-            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
-                credentials = google.auth._default.get_api_key_credentials(api_key_value)
-
-            transport_init: Union[Type[InstanceAdminTransport], Callable[..., InstanceAdminTransport]] = (
-                InstanceAdminClient.get_transport_class(transport)
-                if isinstance(transport, str) or transport is None
-                else cast(Callable[..., InstanceAdminTransport], transport)
-            )
-            # initialize with the provided callable or the passed in class
-            self._transport = transport_init(
-                credentials=credentials,
-                credentials_file=self._client_options.credentials_file,
-                host=self._api_endpoint,
-                scopes=self._client_options.scopes,
-                client_cert_source_for_mtls=self._client_cert_source,
-                quota_project_id=self._client_options.quota_project_id,
-                client_info=client_info,
-                always_use_jwt_access=True,
-                api_audience=self._client_options.api_audience,
-            )
-
-        if "async" not in str(self._transport):
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-                _LOGGER.debug(
-                    "Created client `google.spanner.admin.instance_v1.InstanceAdminClient`.",
-                    extra = {
-                        "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                        "universeDomain": getattr(self._transport._credentials, "universe_domain", ""),
-                        "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
-                        "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                    } if hasattr(self._transport, "_credentials") else {
-                        "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                        "credentialsType": None,
-                    }
-                )
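Since ``__init__`` resolves client options, environment variables, endpoint, and transport in that order, most configuration can be expressed through ``ClientOptions``. A hedged sketch; the endpoint values are illustrative, anonymous credentials stand in for real ones, and ``"rest"`` is assumed to be among the generated transport names alongside ``"grpc"``:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud import spanner_admin_instance_v1

    # An explicit api_endpoint in ClientOptions takes precedence over the
    # GOOGLE_API_USE_MTLS_ENDPOINT resolution described above.
    client = spanner_admin_instance_v1.InstanceAdminClient(
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(api_endpoint="spanner.googleapis.com"),
    )

    # A plain dict works too; __init__ converts it with from_dict().
    # A string transport name selects the transport class by name.
    rest_client = spanner_admin_instance_v1.InstanceAdminClient(
        credentials=AnonymousCredentials(),
        transport="rest",
        client_options={"api_endpoint": "spanner.googleapis.com"},
    )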
-    def list_instance_configs(self,
-            request: Optional[Union[spanner_instance_admin.ListInstanceConfigsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListInstanceConfigsPager:
-        r"""Lists the supported instance configurations for a
-        given project.
-        Returns both Google-managed configurations and
-        user-managed configurations.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_list_instance_configs():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_instance_configs(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest, dict]):
-                The request object. The request for
-                [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
-            parent (str):
-                Required. The name of the project for which a list of
-                supported instance configurations is requested. Values
-                are of the form ``projects/<project>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager:
-                The response for
-                [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.ListInstanceConfigsRequest):
-            request = spanner_instance_admin.ListInstanceConfigsRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_instance_configs]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListInstanceConfigsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
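Because the pager resolves additional pages lazily, callers can iterate item-by-item or page-by-page. A minimal sketch, assuming a placeholder project name and ambient credentials:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    # Item-by-item: extra pages are fetched transparently as needed.
    pager = client.list_instance_configs(parent="projects/my-project")
    names = [config.name for config in pager]

    # Page-by-page, e.g. to batch work per page.
    pager = client.list_instance_configs(parent="projects/my-project")
    for page in pager.pages:
        print(len(page.instance_configs))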
-    def get_instance_config(self,
-            request: Optional[Union[spanner_instance_admin.GetInstanceConfigRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> spanner_instance_admin.InstanceConfig:
-        r"""Gets information about a particular instance
-        configuration.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_get_instance_config():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.GetInstanceConfigRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_instance_config(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest, dict]):
-                The request object. The request for
-                [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].
-            name (str):
-                Required. The name of the requested instance
-                configuration. Values are of the form
-                ``projects/<project>/instanceConfigs/<config>``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_instance_v1.types.InstanceConfig:
-                A possible configuration for a Cloud
-                Spanner instance. Configurations define
-                the geographic placement of nodes and
-                their replication.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.GetInstanceConfigRequest):
-            request = spanner_instance_admin.GetInstanceConfigRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_instance_config]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def create_instance_config(self,
-            request: Optional[Union[spanner_instance_admin.CreateInstanceConfigRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            instance_config: Optional[spanner_instance_admin.InstanceConfig] = None,
-            instance_config_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Creates an instance configuration and begins preparing it to be
-        used. The returned long-running operation can be used to track
-        the progress of preparing the new instance configuration. The
-        instance configuration name is assigned by the caller. If the
-        named instance configuration already exists,
-        ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``.
-
-        Immediately after the request returns:
-
-        - The instance configuration is readable via the API, with all
-          requested attributes. The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field is set to true. Its state is ``CREATING``.
-
-        While the operation is pending:
-
-        - Cancelling the operation renders the instance configuration
-          immediately unreadable via the API.
-        - Except for deleting the creating resource, all other attempts
-          to modify the instance configuration are rejected.
-
-        Upon completion of the returned operation:
-
-        - Instances can be created using the instance configuration.
-        - The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field becomes false. Its state becomes ``READY``.
-
-        The returned long-running operation will have a name of the
-        format ``<instance_config_name>/operations/<operation_id>`` and
-        can be used to track creation of the instance configuration. The
-        metadata field type is
-        [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata].
-        The response field type is
-        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
-        if successful.
-
-        Authorization requires ``spanner.instanceConfigs.create``
-        permission on the resource
-        [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent].
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_create_instance_config():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.CreateInstanceConfigRequest(
-                    parent="parent_value",
-                    instance_config_id="instance_config_id_value",
-                )
-
-                # Make the request
-                operation = client.create_instance_config(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest, dict]):
-                The request object. The request for
-                [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
-            parent (str):
-                Required. The name of the project in which to create the
-                instance configuration. Values are of the form
-                ``projects/<project>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-                Required. The ``InstanceConfig`` proto of the
-                configuration to create. ``instance_config.name`` must
-                be ``<parent>/instanceConfigs/<instance_config_id>``.
-                ``instance_config.base_config`` must be a Google-managed
-                configuration name, e.g. <parent>/instanceConfigs/us-east1,
-                <parent>/instanceConfigs/nam3.
-
-                This corresponds to the ``instance_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            instance_config_id (str):
-                Required. The ID of the instance configuration to
-                create. Valid identifiers are of the form
-                ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and
-                64 characters in length. The ``custom-`` prefix is
-                required to avoid name conflicts with Google-managed
-                configurations.
-
-                This corresponds to the ``instance_config_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
-                define the geographic placement of nodes and their
-                replication.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, instance_config, instance_config_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.CreateInstanceConfigRequest):
-            request = spanner_instance_admin.CreateInstanceConfigRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if instance_config is not None:
-            request.instance_config = instance_config
-        if instance_config_id is not None:
-            request.instance_config_id = instance_config_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_instance_config]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            spanner_instance_admin.InstanceConfig,
-            metadata_type=spanner_instance_admin.CreateInstanceConfigMetadata,
-        )
-
-        # Done; return the response.
-        return response
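``create_instance_config`` returns a ``google.api_core.operation.Operation`` future, so completion, metadata, and errors are all observed through the future API. A hedged sketch; the project, config IDs, and base config name are placeholders, and the field values shown are illustrative rather than a complete valid configuration:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    config = spanner_admin_instance_v1.InstanceConfig(
        name="projects/my-project/instanceConfigs/custom-nam3-extra",
        base_config="projects/my-project/instanceConfigs/nam3",
        display_name="Custom nam3 variant",
    )
    operation = client.create_instance_config(
        parent="projects/my-project",
        instance_config=config,
        instance_config_id="custom-nam3-extra",
    )

    # Metadata (CreateInstanceConfigMetadata, per the docstring above)
    # can be inspected while the LRO runs.
    print(operation.metadata)

    # result() blocks until the LRO completes and returns an
    # InstanceConfig, or raises if the operation failed or was cancelled.
    instance_config = operation.result(timeout=300)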
-    def update_instance_config(self,
-            request: Optional[Union[spanner_instance_admin.UpdateInstanceConfigRequest, dict]] = None,
-            *,
-            instance_config: Optional[spanner_instance_admin.InstanceConfig] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Updates an instance configuration. The returned long-running
-        operation can be used to track the progress of updating the
-        instance. If the named instance configuration does not exist,
-        returns ``NOT_FOUND``.
-
-        Only user-managed configurations can be updated.
-
-        Immediately after the request returns:
-
-        - The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field is set to true.
-
-        While the operation is pending:
-
-        - Cancelling the operation sets its metadata's
-          [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time].
-          The operation is guaranteed to succeed at undoing all changes,
-          after which point it terminates with a ``CANCELLED`` status.
-        - All other attempts to modify the instance configuration are
-          rejected.
-        - Reading the instance configuration via the API continues to
-          give the pre-request values.
-
-        Upon completion of the returned operation:
-
-        - Creating instances using the instance configuration uses the
-          new values.
-        - The new values of the instance configuration are readable via
-          the API.
-        - The instance configuration's
-          [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling]
-          field becomes false.
-
-        The returned long-running operation will have a name of the
-        format ``<instance_config_name>/operations/<operation_id>`` and
-        can be used to track the instance configuration modification.
-        The metadata field type is
-        [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata].
-        The response field type is
-        [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig],
-        if successful.
-
-        Authorization requires ``spanner.instanceConfigs.update``
-        permission on the resource
-        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_update_instance_config():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
-                )
-
-                # Make the request
-                operation = client.update_instance_config(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest, dict]):
-                The request object. The request for
-                [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
-            instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-                Required. The user instance configuration to update,
-                which must always include the instance configuration
-                name. Otherwise, only fields mentioned in
-                [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
-                need be included. To prevent conflicts of concurrent
-                updates,
-                [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
-                can be used.
-
-                This corresponds to the ``instance_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                Required. A mask specifying which fields in
-                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
-                should be updated. The field mask must always be
-                specified; this prevents any future fields in
-                [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
-                from being erased accidentally by clients that do not
-                know about them. Only display_name and labels can be
-                updated.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstanceConfig` A possible configuration for a Cloud Spanner instance. Configurations
-                define the geographic placement of nodes and their
-                replication.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [instance_config, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.UpdateInstanceConfigRequest):
-            request = spanner_instance_admin.UpdateInstanceConfigRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if instance_config is not None:
-            request.instance_config = instance_config
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_instance_config]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("instance_config.name", request.instance_config.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            spanner_instance_admin.InstanceConfig,
-            metadata_type=spanner_instance_admin.UpdateInstanceConfigMetadata,
-        )
-
-        # Done; return the response.
-        return response
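Since ``update_mask`` is required and only ``display_name`` and ``labels`` may be updated, a typical call builds an explicit ``FieldMask``. A minimal sketch, assuming placeholder resource names:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    config = spanner_admin_instance_v1.InstanceConfig(
        name="projects/my-project/instanceConfigs/custom-nam3-extra",
        display_name="Renamed custom config",
    )
    # Only the fields named in the mask are written; everything else
    # on the resource is left untouched.
    operation = client.update_instance_config(
        instance_config=config,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    updated = operation.result()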
-    def delete_instance_config(self,
-            request: Optional[Union[spanner_instance_admin.DeleteInstanceConfigRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> None:
-        r"""Deletes the instance configuration. Deletion is only allowed
-        when no instances are using the configuration. If any instances
-        are using the configuration, returns ``FAILED_PRECONDITION``.
-
-        Only user-managed configurations can be deleted.
-
-        Authorization requires ``spanner.instanceConfigs.delete``
-        permission on the resource
-        [name][google.spanner.admin.instance.v1.InstanceConfig.name].
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_delete_instance_config():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.DeleteInstanceConfigRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                client.delete_instance_config(request=request)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest, dict]):
-                The request object. The request for
-                [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig].
-            name (str):
-                Required. The name of the instance configuration to be
-                deleted. Values are of the form
-                ``projects/<project>/instanceConfigs/<instance_config>``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.DeleteInstanceConfigRequest):
-            request = spanner_instance_admin.DeleteInstanceConfigRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_instance_config]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def list_instance_config_operations(self,
-            request: Optional[Union[spanner_instance_admin.ListInstanceConfigOperationsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListInstanceConfigOperationsPager:
-        r"""Lists the user-managed instance configuration long-running
-        operations in the given project. An instance configuration
-        operation has a name of the form
-        ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``.
-        The long-running operation metadata field type
-        ``metadata.type_url`` describes the type of the metadata.
-        Operations returned include those that have
-        completed/failed/canceled within the last 7 days, and pending
-        operations. Operations returned are ordered by
-        ``operation.metadata.value.start_time`` in descending order
-        starting from the most recently started operation.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_list_instance_config_operations():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_instance_config_operations(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest, dict]):
-                The request object. The request for
-                [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
-            parent (str):
-                Required. The project of the instance configuration
-                operations. Values are of the form
-                ``projects/<project>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager:
-                The response for
-                [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.ListInstanceConfigOperationsRequest):
-            request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_instance_config_operations]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListInstanceConfigOperationsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_instances(self,
-            request: Optional[Union[spanner_instance_admin.ListInstancesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListInstancesPager:
-        r"""Lists all instances in the given project.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_list_instances():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.ListInstancesRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_instances(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest, dict]):
-                The request object. The request for
-                [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
-            parent (str):
-                Required. The name of the project for which a list of
-                instances is requested. Values are of the form
-                ``projects/<project>``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager:
-                The response for
-                [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.ListInstancesRequest):
-            request = spanner_instance_admin.ListInstancesRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_instances]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListInstancesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_instance_partitions(self,
-            request: Optional[Union[spanner_instance_admin.ListInstancePartitionsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListInstancePartitionsPager:
-        r"""Lists all instance partitions for the given instance.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import spanner_admin_instance_v1
-
-            def sample_list_instance_partitions():
-                # Create a client
-                client = spanner_admin_instance_v1.InstanceAdminClient()
-
-                # Initialize request argument(s)
-                request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_instance_partitions(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest, dict]):
-                The request object. The request for
-                [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
-            parent (str):
-                Required. The instance whose instance partitions should
-                be listed. Values are of the form
-                ``projects/<project>/instances/<instance>``. Use
-                ``{instance} = '-'`` to list instance partitions for all
-                Instances in a project, e.g.,
-                ``projects/myproject/instances/-``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager:
-                The response for
-                [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, spanner_instance_admin.ListInstancePartitionsRequest):
-            request = spanner_instance_admin.ListInstancePartitionsRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_instance_partitions]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListInstancePartitionsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
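The ``{instance} = '-'`` wildcard described above lets a single call enumerate partitions across every instance in a project. A minimal sketch, assuming a placeholder project ID:

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    # '-' in place of an instance ID means "all instances in the project".
    pager = client.list_instance_partitions(
        parent="projects/my-project/instances/-",
    )
    for partition in pager:
        print(partition.name)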
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_get_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest, dict]): - The request object. The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - name (str): - Required. The name of the requested instance. Values are - of the form ``projects//instances/``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.Instance: - An isolated set of Cloud Spanner - resources on which databases can be - hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.GetInstanceRequest): - request = spanner_instance_admin.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_instance(self, - request: Optional[Union[spanner_instance_admin.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[spanner_instance_admin.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates an instance and begins preparing it to begin serving. - The returned long-running operation can be used to track the - progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track creation of the instance. The metadata field type - is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_create_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest, dict]): - The request object. The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - parent (str): - Required. The name of the project in which to create the - instance. Values are of the form ``projects/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_id (str): - Required. 
The ID of the instance to create. Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` - and must be between 2 and 64 characters in length. - - This corresponds to the ``instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to create. The name may be - omitted, but if specified must be - ``/instances/``. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, instance_id, instance] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstanceRequest): - request = spanner_instance_admin.CreateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_id is not None: - request.instance_id = instance_id - if instance is not None: - request.instance = instance - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.Instance, - metadata_type=spanner_instance_admin.CreateInstanceMetadata, - ) - - # Done; return the response. 
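Because the response is wrapped in an ``operation.from_gapic`` future (above), callers block on ``result()`` to obtain the finished ``Instance``. A sketch using the flattened fields; all resource names are placeholders.

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    # All resource names below are placeholders.
    operation = client.create_instance(
        parent="projects/my-project",
        instance_id="my-instance",
        instance=spanner_admin_instance_v1.Instance(
            config="projects/my-project/instanceConfigs/regional-us-central1",
            display_name="My Instance",
            node_count=1,
        ),
    )
    # result() polls the long-running operation; the payload is the
    # created Instance once its state becomes READY.
    instance = operation.result(timeout=300)
    print(instance.state)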
- return response - - def update_instance(self, - request: Optional[Union[spanner_instance_admin.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[spanner_instance_admin.Instance] = None, - field_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an instance, and begins allocating or releasing - resources as requested. The returned long-running operation can - be used to track the progress of updating the instance. If the - named instance does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The metadata field type - is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on the resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_update_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest, dict]): - The request object. 
The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to update, which must always - include the instance name. Otherwise, only fields - mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] - should be updated. The field mask must always be - specified; this prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.spanner_admin_instance_v1.types.Instance` - An isolated set of Cloud Spanner resources on which - databases can be hosted. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstanceRequest): - request = spanner_instance_admin.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.Instance, - metadata_type=spanner_instance_admin.UpdateInstanceMetadata, - ) - - # Done; return the response. - return response - - def delete_instance(self, - request: Optional[Union[spanner_instance_admin.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_delete_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - client.delete_instance(request=request) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest, dict]): - The request object. The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - name (str): - Required. The name of the instance to be deleted. Values - are of the form - ``projects//instances/`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstanceRequest): - request = spanner_instance_admin.DeleteInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
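For ``update_instance``, the ``instance.name`` routing header above is read from the request itself, so the ``Instance`` message must carry its name even when only masked fields change. A sketch of a display-name-only update; the resource names are placeholders.

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    # ``name`` identifies the instance; only fields listed in the
    # mask are modified. The IDs are placeholders.
    instance = spanner_admin_instance_v1.Instance(
        name="projects/my-project/instances/my-instance",
        display_name="Renamed instance",
    )
    operation = client.update_instance(
        instance=instance,
        field_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    print(operation.result().display_name)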
- rpc = self._transport._wrapped_methods[self._transport.delete_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on an instance resource. Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')" } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` - - **YAML example:** - - ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')" } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` - - **YAML example:** - - ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` - - For a description of IAM and its features, see the - [IAM - documentation](https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - permissions: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - resource (str): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (MutableSequence[str]): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview `__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource, permissions] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - if resource is not None: - request.resource = resource - if permissions: - request.permissions.extend(permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. 
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_instance_partition(self, - request: Optional[Union[spanner_instance_admin.GetInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> spanner_instance_admin.InstancePartition: - r"""Gets information about a particular instance - partition. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance_partition(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest, dict]): - The request object. The request for - [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. - name (str): - Required. The name of the requested instance partition. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.types.InstancePartition: - An isolated set of Cloud Spanner - resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.GetInstancePartitionRequest): - request = spanner_instance_admin.GetInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
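Since ``set_iam_policy``, ``get_iam_policy``, and ``test_iam_permissions`` accept plain dicts (expanded into ``iam_policy_pb2`` requests, as shown above), a read-modify-write of an instance policy needs no extra request construction. A sketch assuming a placeholder instance and principal.

.. code-block:: python

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()
    resource = "projects/my-project/instances/my-instance"  # placeholder

    # Read the current policy, append a binding, and write it back.
    # Carrying the fetched policy's etag forward lets the server
    # reject the write if the policy changed concurrently.
    policy = client.get_iam_policy(request={"resource": resource})
    policy.bindings.add(
        role="roles/spanner.databaseReader",
        members=["user:example@example.com"],  # placeholder principal
    )
    client.set_iam_policy(request={"resource": resource, "policy": policy})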
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_instance_partition(self, - request: Optional[Union[spanner_instance_admin.CreateInstancePartitionRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, - instance_partition_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates an instance partition and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance partition. The - instance partition name is assigned by the caller. If the named - instance partition already exists, ``CreateInstancePartition`` - returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance partition is readable via the API, with all - requested attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance partition - immediately unreadable via the API. - - The instance partition can be deleted. - - All other attempts to modify the instance partition are - rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can start using this instance partition. - - The instance partition's allocated resource levels are - readable via the API. - - The instance partition's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track creation of the instance partition. The - metadata field type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_create_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstancePartitionRequest( - parent="parent_value", - instance_partition_id="instance_partition_id_value", - instance_partition=instance_partition, - ) - - # Make the request - operation = client.create_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest, dict]): - The request object. The request for - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - parent (str): - Required. The name of the instance in which to create - the instance partition. Values are of the form - ``projects//instances/``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - Required. The instance partition to create. The - instance_partition.name may be omitted, but if specified - must be - ``/instancePartitions/``. - - This corresponds to the ``instance_partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - instance_partition_id (str): - Required. The ID of the instance partition to create. - Valid identifiers are of the form - ``[a-z][-a-z0-9]*[a-z0-9]`` and must be between 2 and 64 - characters in length. - - This corresponds to the ``instance_partition_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, instance_partition, instance_partition_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.CreateInstancePartitionRequest): - request = spanner_instance_admin.CreateInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if instance_partition is not None: - request.instance_partition = instance_partition - if instance_partition_id is not None: - request.instance_partition_id = instance_partition_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.CreateInstancePartitionMetadata, - ) - - # Done; return the response. - return response - - def delete_instance_partition(self, - request: Optional[Union[spanner_instance_admin.DeleteInstancePartitionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - client.delete_instance_partition(request=request) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest, dict]): - The request object. 
The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - name (str): - Required. The name of the instance partition to be - deleted. Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.DeleteInstancePartitionRequest): - request = spanner_instance_admin.DeleteInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_instance_partition(self, - request: Optional[Union[spanner_instance_admin.UpdateInstancePartitionRequest, dict]] = None, - *, - instance_partition: Optional[spanner_instance_admin.InstancePartition] = None, - field_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. 
- The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track the instance partition modification. - The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_update_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( - instance_partition=instance_partition, - ) - - # Make the request - operation = client.update_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest, dict]): - The request object. The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - Required. The instance partition to update, which must - always include the instance partition name. Otherwise, - only fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] - need be included. - - This corresponds to the ``instance_partition`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - should be updated. 
The field mask must always be - specified; this prevents any future fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - from being erased accidentally by clients that do not - know about them. - - This corresponds to the ``field_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.InstancePartition` An isolated set of Cloud Spanner resources that databases can define - placements on. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance_partition, field_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.UpdateInstancePartitionRequest): - request = spanner_instance_admin.UpdateInstancePartitionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance_partition is not None: - request.instance_partition = instance_partition - if field_mask is not None: - request.field_mask = field_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance_partition] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance_partition.name", request.instance_partition.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.InstancePartition, - metadata_type=spanner_instance_admin.UpdateInstancePartitionMetadata, - ) - - # Done; return the response. - return response - - def list_instance_partition_operations(self, - request: Optional[Union[spanner_instance_admin.ListInstancePartitionOperationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancePartitionOperationsPager: - r"""Lists instance partition long-running operations in the given - instance. 
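As with ``update_instance``, the ``instance_partition.name`` routing header above comes from the request, so the partition message must include its name alongside the masked fields. A sketch of scaling a partition's compute capacity; the resource names are placeholders.

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    # All resource names are placeholders.
    partition = spanner_admin_instance_v1.InstancePartition(
        name="projects/my-project/instances/my-instance"
             "/instancePartitions/my-partition",
        node_count=3,
    )
    operation = client.update_instance_partition(
        instance_partition=partition,
        field_mask=field_mask_pb2.FieldMask(paths=["node_count"]),
    )
    print(operation.result().node_count)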
An instance partition operation has a name of the form - ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_list_instance_partition_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partition_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest, dict]): - The request object. The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - parent (str): - Required. The parent instance of the instance partition - operations. Values are of the form - ``projects/<project>/instances/<instance>``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager: - The response for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
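# Editor's aside (a sketch under assumed names, not generated code): the
# "quick check" below enforces that callers pass either a request object
# or flattened fields, never both.
from google.cloud import spanner_admin_instance_v1

client = spanner_admin_instance_v1.InstanceAdminClient()
parent = "projects/my-project/instances/my-instance"  # hypothetical parent

pager = client.list_instance_partition_operations(parent=parent)  # OK: flattened field only
# client.list_instance_partition_operations(
#     request={"parent": parent}, parent=parent)                   # ValueError: both were set
for op in pager:  # the pager fetches further pages lazily
    print(op.name)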
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.ListInstancePartitionOperationsRequest): - request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instance_partition_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInstancePartitionOperationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def move_instance(self, - request: Optional[Union[spanner_instance_admin.MoveInstanceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Moves an instance to the target instance configuration. You can - use the returned long-running operation to track the progress of - moving the instance. - - ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance - meets any of the following criteria: - - - Is undergoing a move to a different instance configuration - - Has backups - - Has an ongoing update - - Contains any CMEK-enabled databases - - Is a free trial instance - - While the operation is pending: - - - All other attempts to modify the instance, including changes - to its compute capacity, are rejected. - - - The following database and backup admin operations are - rejected: - - - ``DatabaseAdmin.CreateDatabase`` - - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if - default_leader is specified in the request.) - - ``DatabaseAdmin.RestoreDatabase`` - - ``DatabaseAdmin.CreateBackup`` - - ``DatabaseAdmin.CopyBackup`` - - - Both the source and target instance configurations are subject - to hourly compute and storage charges. - - - The instance might experience higher read-write latencies and - a higher transaction abort rate. However, moving an instance - doesn't cause any downtime. - - The returned long-running operation has a name of the format - ``<instance_name>/operations/<operation_id>`` and can be used to - track the move instance operation. The metadata field type is - [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful.
Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. - Cancellation is not immediate because it involves moving any - data previously moved to the target instance configuration back - to the original instance configuration. You can use this - operation to track the progress of the cancellation. Upon - successful completion of the cancellation, the operation - terminates with ``CANCELLED`` status. - - If not cancelled, upon completion of the returned operation: - - - The instance successfully moves to the target instance - configuration. - - You are billed for compute and storage in target instance - configuration. - - Authorization requires the ``spanner.instances.update`` - permission on the resource - [instance][google.spanner.admin.instance.v1.Instance]. - - For more details, see `Move an - instance <https://cloud.google.com/spanner/docs/move-instance>`__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import spanner_admin_instance_v1 - - def sample_move_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.MoveInstanceRequest( - name="name_value", - target_config="target_config_value", - ) - - # Make the request - operation = client.move_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest, dict]): - The request object. The request for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.spanner_admin_instance_v1.types.MoveInstanceResponse` The response for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, spanner_instance_admin.MoveInstanceRequest): - request = spanner_instance_admin.MoveInstanceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.move_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain.
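# Editor's sketch (assumed names; not part of the generated file): tracking and
# cancelling the move described above. Per the docstring, cancellation is itself
# long-running and the same operation tracks the rollback.
from google.cloud import spanner_admin_instance_v1

client = spanner_admin_instance_v1.InstanceAdminClient()
operation = client.move_instance(request=spanner_admin_instance_v1.MoveInstanceRequest(
    name="projects/my-project/instances/my-instance",          # hypothetical instance
    target_config="projects/my-project/instanceConfigs/nam3",  # hypothetical target config
))
print(operation.metadata)  # MoveInstanceMetadata, including cancel_time once set
operation.cancel()         # best effort; the operation ends CANCELLED if honored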
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - spanner_instance_admin.MoveInstanceResponse, - metadata_type=spanner_instance_admin.MoveInstanceMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "InstanceAdminClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
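# Editor's sketch (assumed operation name; not generated code): delete_operation,
# as documented above, only tells the server the client no longer cares about the
# result — it does not stop the underlying work (that is cancel_operation,
# defined just below).
from google.cloud import spanner_admin_instance_v1

client = spanner_admin_instance_v1.InstanceAdminClient()
op_name = "projects/my-project/instances/my-instance/operations/op-123"  # hypothetical

current = client.get_operation({"name": op_name})  # dicts are expanded into the proto request
if current.done:
    client.delete_operation({"name": op_name})     # discard the finished operation's record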
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - -__all__ = ( - "InstanceAdminClient", -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py deleted file mode 100644 index bde9847337..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/pagers.py +++ /dev/null @@ -1,723 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.longrunning import operations_pb2 # type: ignore - - -class ListInstanceConfigsPager: - """A pager for iterating through ``list_instance_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instance_configs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstanceConfigs`` requests and continue to iterate - through the ``instance_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstanceConfigsResponse], - request: spanner_instance_admin.ListInstanceConfigsRequest, - response: spanner_instance_admin.ListInstanceConfigsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.InstanceConfig]: - for page in self.pages: - yield from page.instance_configs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstanceConfigsAsyncPager: - """A pager for iterating through ``list_instance_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instance_configs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstanceConfigs`` requests and continue to iterate - through the ``instance_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]], - request: spanner_instance_admin.ListInstanceConfigsRequest, - response: spanner_instance_admin.ListInstanceConfigsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstanceConfig]: - async def async_generator(): - async for page in self.pages: - for response in page.instance_configs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstanceConfigOperationsPager: - """A pager for iterating through ``list_instance_config_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstanceConfigOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstanceConfigOperationsResponse], - request: spanner_instance_admin.ListInstanceConfigOperationsRequest, - response: spanner_instance_admin.ListInstanceConfigOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
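# Editor's sketch (assumed client and parent; not part of the generated file):
# the two ways these pagers are consumed — item-by-item via __iter__, or
# page-by-page via the `pages` property shown above. Use one or the other;
# both advance the same underlying request/response state.
from google.cloud import spanner_admin_instance_v1

client = spanner_admin_instance_v1.InstanceAdminClient()
pager = client.list_instance_configs(parent="projects/my-project")  # hypothetical parent

for config in pager:  # items across every page, fetched lazily
    print(config.name)

# The Async pager variants are consumed with `async for` on the async client:
#   async for config in await async_client.list_instance_configs(parent=...):
#       ...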
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstanceConfigOperationsAsyncPager: - """A pager for iterating through ``list_instance_config_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstanceConfigOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]], - request: spanner_instance_admin.ListInstanceConfigOperationsRequest, - response: spanner_instance_admin.ListInstanceConfigOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstanceConfigOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstanceConfigOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancesPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstancesResponse], - request: spanner_instance_admin.ListInstancesRequest, - response: spanner_instance_admin.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.Instance]: - for page in self.pages: - yield from page.instances - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancesAsyncPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstancesResponse]], - request: spanner_instance_admin.ListInstancesRequest, - response: spanner_instance_admin.ListInstancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.Instance]: - async def async_generator(): - async for page in self.pages: - for response in page.instances: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionsPager: - """A pager for iterating through ``list_instance_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instance_partitions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstancePartitions`` requests and continue to iterate - through the ``instance_partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstancePartitionsResponse], - request: spanner_instance_admin.ListInstancePartitionsRequest, - response: spanner_instance_admin.ListInstancePartitionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[spanner_instance_admin.InstancePartition]: - for page in self.pages: - yield from page.instance_partitions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionsAsyncPager: - """A pager for iterating through ``list_instance_partitions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instance_partitions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstancePartitions`` requests and continue to iterate - through the ``instance_partitions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]], - request: spanner_instance_admin.ListInstancePartitionsRequest, - response: spanner_instance_admin.ListInstancePartitionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[spanner_instance_admin.InstancePartition]: - async def async_generator(): - async for page in self.pages: - for response in page.instance_partitions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionOperationsPager: - """A pager for iterating through ``list_instance_partition_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstancePartitionOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., spanner_instance_admin.ListInstancePartitionOperationsResponse], - request: spanner_instance_admin.ListInstancePartitionOperationsRequest, - response: spanner_instance_admin.ListInstancePartitionOperationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[operations_pb2.Operation]: - for page in self.pages: - yield from page.operations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancePartitionOperationsAsyncPager: - """A pager for iterating through ``list_instance_partition_operations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``operations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstancePartitionOperations`` requests and continue to iterate - through the ``operations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]], - request: spanner_instance_admin.ListInstancePartitionOperationsRequest, - response: spanner_instance_admin.ListInstancePartitionOperationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest): - The initial request object. - response (google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = spanner_instance_admin.ListInstancePartitionOperationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[spanner_instance_admin.ListInstancePartitionOperationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: - async def async_generator(): - async for page in self.pages: - for response in page.operations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst deleted file mode 100644 index 762ac0c765..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`InstanceAdminTransport` is the ABC for all transports. -- public child `InstanceAdminGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `InstanceAdminGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseInstanceAdminRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `InstanceAdminRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py deleted file mode 100644 index 1892e6a551..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
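# Editor's sketch (hedged; standard GAPIC behavior rather than anything specific
# to this file): the transport classes described in the README above are selected
# by their registered name via the client's `transport` argument.
from google.cloud import spanner_admin_instance_v1

grpc_client = spanner_admin_instance_v1.InstanceAdminClient(transport="grpc")
rest_client = spanner_admin_instance_v1.InstanceAdminClient(transport="rest")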
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import InstanceAdminTransport -from .grpc import InstanceAdminGrpcTransport -from .grpc_asyncio import InstanceAdminGrpcAsyncIOTransport -from .rest import InstanceAdminRestTransport -from .rest import InstanceAdminRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[InstanceAdminTransport]] -_transport_registry['grpc'] = InstanceAdminGrpcTransport -_transport_registry['grpc_asyncio'] = InstanceAdminGrpcAsyncIOTransport -_transport_registry['rest'] = InstanceAdminRestTransport - -__all__ = ( - 'InstanceAdminTransport', - 'InstanceAdminGrpcTransport', - 'InstanceAdminGrpcAsyncIOTransport', - 'InstanceAdminRestTransport', - 'InstanceAdminRestInterceptor', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py deleted file mode 100644 index cac35e8288..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py +++ /dev/null @@ -1,567 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
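# Editor's sketch (assumed usage): the registry above also backs the client's
# classmethod for resolving a transport class from its registered label.
from google.cloud import spanner_admin_instance_v1

transport_cls = spanner_admin_instance_v1.InstanceAdminClient.get_transport_class("grpc_asyncio")
# transport_cls is InstanceAdminGrpcAsyncIOTransport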
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.spanner_admin_instance_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore -import google.protobuf - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class InstanceAdminTransport(abc.ABC): - """Abstract transport class for InstanceAdmin.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', - ) - - DEFAULT_HOST: str = 'spanner.googleapis.com' - - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
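# Editor's sketch (assumed key-file path; not generated code): the resolution
# order implemented below — explicit credentials win, then the deprecated
# credentials_file, then Application Default Credentials.
from google.oauth2 import service_account
from google.cloud import spanner_admin_instance_v1

creds = service_account.Credentials.from_service_account_file(
    "/path/to/key.json",  # hypothetical key file
    scopes=["https://www.googleapis.com/auth/spanner.admin"],
)
client = spanner_admin_instance_v1.InstanceAdminClient(credentials=creds)
# With no credentials argument at all, google.auth.default() supplies them instead.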
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_instance_configs: gapic_v1.method.wrap_method( - self.list_instance_configs, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_instance_config: gapic_v1.method.wrap_method( - self.get_instance_config, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_instance_config: gapic_v1.method.wrap_method( - self.create_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_config: gapic_v1.method.wrap_method( - self.update_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_config: gapic_v1.method.wrap_method( - self.delete_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_config_operations: gapic_v1.method.wrap_method( - self.list_instance_config_operations, - default_timeout=None, - client_info=client_info, - ), - self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_instance_partitions: gapic_v1.method.wrap_method( - self.list_instance_partitions, - default_timeout=None, - client_info=client_info, - ), - self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), 
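The ``retries.Retry(initial=1.0, maximum=32.0, multiplier=1.3, ...)`` policies wrapped above amount to exponential backoff on ``DeadlineExceeded``/``ServiceUnavailable``, capped at 32 s per attempt under a 3600 s overall deadline. A standalone sketch of the nominal delay sequence (the jitter that ``google.api_core`` applies on top is ignored here; this is an illustration, not library code)::

    def nominal_backoff(initial=1.0, maximum=32.0, multiplier=1.3, attempts=8):
        """Yield the un-jittered retry delays implied by the policies above."""
        delay = initial
        for _ in range(attempts):
            yield round(delay, 2)
            delay = min(delay * multiplier, maximum)

    print(list(nominal_backoff()))
    # [1.0, 1.3, 1.69, 2.2, 2.86, 3.71, 4.83, 6.27]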
- self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.get_instance_partition: gapic_v1.method.wrap_method( - self.get_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.create_instance_partition: gapic_v1.method.wrap_method( - self.create_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_partition: gapic_v1.method.wrap_method( - self.delete_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_partition: gapic_v1.method.wrap_method( - self.update_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_partition_operations: gapic_v1.method.wrap_method( - self.list_instance_partition_operations, - default_timeout=None, - client_info=client_info, - ), - self.move_instance: gapic_v1.method.wrap_method( - self.move_instance, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - Union[ - spanner_instance_admin.ListInstanceConfigsResponse, - Awaitable[spanner_instance_admin.ListInstanceConfigsResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - Union[ - spanner_instance_admin.InstanceConfig, - Awaitable[spanner_instance_admin.InstanceConfig] - ]]: - raise NotImplementedError() - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_instance_config(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceConfigRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_instance_config(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceConfigRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_instance_config_operations(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigOperationsRequest], - Union[ - spanner_instance_admin.ListInstanceConfigOperationsResponse, - Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - Union[ - spanner_instance_admin.ListInstancesResponse, - Awaitable[spanner_instance_admin.ListInstancesResponse] - ]]: - raise NotImplementedError() - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - Union[ - spanner_instance_admin.ListInstancePartitionsResponse, - Awaitable[spanner_instance_admin.ListInstancePartitionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - Union[ - spanner_instance_admin.Instance, - Awaitable[spanner_instance_admin.Instance] - ]]: - raise NotImplementedError() - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - 
[iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - Union[ - spanner_instance_admin.InstancePartition, - Awaitable[spanner_instance_admin.InstancePartition] - ]]: - raise NotImplementedError() - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - Union[ - spanner_instance_admin.ListInstancePartitionOperationsResponse, - Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse] - ]]: - raise NotImplementedError() - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'InstanceAdminTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py deleted file mode 100644 index 0319e519cc..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py +++ /dev/null @@ -1,1359 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a dict of stringified values.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class InstanceAdminGrpcTransport(InstanceAdminTransport):
-    """gRPC backend transport for InstanceAdmin.
- - Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - spanner_instance_admin.ListInstanceConfigsResponse]: - r"""Return a callable for the list instance configs method over gRPC. - - Lists the supported instance configurations for a - given project. - Returns both Google-managed configurations and - user-managed configurations. - - Returns: - Callable[[~.ListInstanceConfigsRequest], - ~.ListInstanceConfigsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_configs' not in self._stubs: - self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', - request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, - ) - return self._stubs['list_instance_configs'] - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - spanner_instance_admin.InstanceConfig]: - r"""Return a callable for the get instance config method over gRPC. - - Gets information about a particular instance - configuration. - - Returns: - Callable[[~.GetInstanceConfigRequest], - ~.InstanceConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_config' not in self._stubs: - self._stubs['get_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', - request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, - response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, - ) - return self._stubs['get_instance_config'] - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - operations_pb2.Operation]: - r"""Return a callable for the create instance config method over gRPC. - - Creates an instance configuration and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance configuration. The - instance configuration name is assigned by the caller. If the - named instance configuration already exists, - ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. - - Immediately after the request returns: - - - The instance configuration is readable via the API, with all - requested attributes. 
The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. Its state is ``CREATING``. - - While the operation is pending: - - - Cancelling the operation renders the instance configuration - immediately unreadable via the API. - - Except for deleting the creating resource, all other attempts - to modify the instance configuration are rejected. - - Upon completion of the returned operation: - - - Instances can be created using the instance configuration. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. Its state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and - can be used to track creation of the instance configuration. The - metadata field type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.create`` - permission on the resource - [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent]. - - Returns: - Callable[[~.CreateInstanceConfigRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance_config' not in self._stubs: - self._stubs['create_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig', - request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance_config'] - - @property - def update_instance_config(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceConfigRequest], - operations_pb2.Operation]: - r"""Return a callable for the update instance config method over gRPC. - - Updates an instance configuration. The returned long-running - operation can be used to track the progress of updating the - instance. If the named instance configuration does not exist, - returns ``NOT_FOUND``. - - Only user-managed configurations can be updated. - - Immediately after the request returns: - - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. - - While the operation is pending: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. - The operation is guaranteed to succeed at undoing all changes, - after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance configuration are - rejected. - - Reading the instance configuration via the API continues to - give the pre-request values. - - Upon completion of the returned operation: - - - Creating instances using the instance configuration uses the - new values. - - The new values of the instance configuration are readable via - the API. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. 
- - The returned long-running operation will have a name of the - format ``/operations/`` and - can be used to track the instance configuration modification. - The metadata field type is - [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - Returns: - Callable[[~.UpdateInstanceConfigRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance_config' not in self._stubs: - self._stubs['update_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig', - request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance_config'] - - @property - def delete_instance_config(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceConfigRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete instance config method over gRPC. - - Deletes the instance configuration. Deletion is only allowed - when no instances are using the configuration. If any instances - are using the configuration, returns ``FAILED_PRECONDITION``. - - Only user-managed configurations can be deleted. - - Authorization requires ``spanner.instanceConfigs.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - Returns: - Callable[[~.DeleteInstanceConfigRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance_config' not in self._stubs: - self._stubs['delete_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig', - request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance_config'] - - @property - def list_instance_config_operations(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigOperationsRequest], - spanner_instance_admin.ListInstanceConfigOperationsResponse]: - r"""Return a callable for the list instance config - operations method over gRPC. - - Lists the user-managed instance configuration long-running - operations in the given project. An instance configuration - operation has a name of the form - ``projects//instanceConfigs//operations/``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. 
- - Returns: - Callable[[~.ListInstanceConfigOperationsRequest], - ~.ListInstanceConfigOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_config_operations' not in self._stubs: - self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', - request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, - ) - return self._stubs['list_instance_config_operations'] - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - spanner_instance_admin.ListInstancesResponse]: - r"""Return a callable for the list instances method over gRPC. - - Lists all instances in the given project. - - Returns: - Callable[[~.ListInstancesRequest], - ~.ListInstancesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', - request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - spanner_instance_admin.ListInstancePartitionsResponse]: - r"""Return a callable for the list instance partitions method over gRPC. - - Lists all instance partitions for the given instance. - - Returns: - Callable[[~.ListInstancePartitionsRequest], - ~.ListInstancePartitionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partitions' not in self._stubs: - self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', - request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, - ) - return self._stubs['list_instance_partitions'] - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - spanner_instance_admin.Instance]: - r"""Return a callable for the get instance method over gRPC. - - Gets information about a particular instance. - - Returns: - Callable[[~.GetInstanceRequest], - ~.Instance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance', - request_serializer=spanner_instance_admin.GetInstanceRequest.serialize, - response_deserializer=spanner_instance_admin.Instance.deserialize, - ) - return self._stubs['get_instance'] - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the create instance method over gRPC. - - Creates an instance and begins preparing it to begin serving. - The returned long-running operation can be used to track the - progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track creation of the instance. The metadata field type - is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Returns: - Callable[[~.CreateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance', - request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance'] - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the update instance method over gRPC. - - Updates an instance, and begins allocating or releasing - resources as requested. The returned long-running operation can - be used to track the progress of updating the instance. If the - named instance does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level. 
- - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned long-running operation will have a name of the - format ``/operations/`` and can be - used to track the instance modification. The metadata field type - is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on the resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - Returns: - Callable[[~.UpdateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', - request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - Returns: - Callable[[~.DeleteInstanceRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', - request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on an instance resource. Replaces - any existing policy. 
- - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - spanner_instance_admin.InstancePartition]: - r"""Return a callable for the get instance partition method over gRPC. 
- - Gets information about a particular instance - partition. - - Returns: - Callable[[~.GetInstancePartitionRequest], - ~.InstancePartition]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_partition' not in self._stubs: - self._stubs['get_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition', - request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize, - response_deserializer=spanner_instance_admin.InstancePartition.deserialize, - ) - return self._stubs['get_instance_partition'] - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - operations_pb2.Operation]: - r"""Return a callable for the create instance partition method over gRPC. - - Creates an instance partition and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance partition. The - instance partition name is assigned by the caller. If the named - instance partition already exists, ``CreateInstancePartition`` - returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance partition is readable via the API, with all - requested attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance partition - immediately unreadable via the API. - - The instance partition can be deleted. - - All other attempts to modify the instance partition are - rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can start using this instance partition. - - The instance partition's allocated resource levels are - readable via the API. - - The instance partition's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track creation of the instance partition. The - metadata field type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Returns: - Callable[[~.CreateInstancePartitionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_instance_partition' not in self._stubs: - self._stubs['create_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition', - request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance_partition'] - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete instance partition method over gRPC. - - Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.DeleteInstancePartitionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance_partition' not in self._stubs: - self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition', - request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance_partition'] - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - operations_pb2.Operation]: - r"""Return a callable for the update instance partition method over gRPC. - - Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. - - The returned long-running operation will have a name of the - format ``/operations/`` - and can be used to track the instance partition modification. 
- The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.UpdateInstancePartitionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance_partition' not in self._stubs: - self._stubs['update_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition', - request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance_partition'] - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - spanner_instance_admin.ListInstancePartitionOperationsResponse]: - r"""Return a callable for the list instance partition - operations method over gRPC. - - Lists instance partition long-running operations in the given - instance. An instance partition operation has a name of the form - ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - Returns: - Callable[[~.ListInstancePartitionOperationsRequest], - ~.ListInstancePartitionOperationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partition_operations' not in self._stubs: - self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations', - request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize, - ) - return self._stubs['list_instance_partition_operations'] - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the move instance method over gRPC. - - Moves an instance to the target instance configuration. You can - use the returned long-running operation to track the progress of - moving the instance.
- - ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance - meets any of the following criteria: - - - Is undergoing a move to a different instance configuration - - Has backups - - Has an ongoing update - - Contains any CMEK-enabled databases - - Is a free trial instance - - While the operation is pending: - - - All other attempts to modify the instance, including changes - to its compute capacity, are rejected. - - - The following database and backup admin operations are - rejected: - - - ``DatabaseAdmin.CreateDatabase`` - - ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if - default_leader is specified in the request.) - - ``DatabaseAdmin.RestoreDatabase`` - - ``DatabaseAdmin.CreateBackup`` - - ``DatabaseAdmin.CopyBackup`` - - - Both the source and target instance configurations are subject - to hourly compute and storage charges. - - - The instance might experience higher read-write latencies and - a higher transaction abort rate. However, moving an instance - doesn't cause any downtime. - - The returned long-running operation has a name of the format - ``<instance_name>/operations/<operation_id>`` and can be used to - track the move instance operation. The metadata field type is - [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time]. - Cancellation is not immediate because it involves moving any - data previously moved to the target instance configuration back - to the original instance configuration. You can use this - operation to track the progress of the cancellation. Upon - successful completion of the cancellation, the operation - terminates with ``CANCELLED`` status. - - If not cancelled, upon completion of the returned operation: - - - The instance successfully moves to the target instance - configuration. - - You are billed for compute and storage in target instance - configuration. - - Authorization requires the ``spanner.instances.update`` - permission on the resource - [instance][google.spanner.admin.instance.v1.Instance]. - - For more details, see `Move an - instance <https://cloud.google.com/spanner/docs/move-instance>`__. - - Returns: - Callable[[~.MoveInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
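The generic ``google.longrunning.Operations`` stubs defined next are thin pass-throughs over the channel. A hedged usage sketch, assuming an already-constructed synchronous transport instance; the operation name is a placeholder, not a value from this library:

    from google.longrunning import operations_pb2


    def poll_operation(transport, name: str) -> operations_pb2.Operation:
        # `get_operation` is the cached unary-unary stub; calling it
        # performs the RPC and returns a deserialized Operation message.
        request = operations_pb2.GetOperationRequest(name=name)
        return transport.get_operation(request)


    # op = poll_operation(transport, 'projects/p/instances/i/operations/o')
    # print(op.done)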
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'InstanceAdminGrpcTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py deleted file mode 100644 index 01364ea833..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py +++ /dev/null @@ -1,1560 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO -from .grpc import InstanceAdminGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload =
f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class InstanceAdminGrpcAsyncIOTransport(InstanceAdminTransport): - """gRPC AsyncIO backend transport for InstanceAdmin. - - Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be - removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - This argument will be removed in the next major version of this library. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. 
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - Awaitable[spanner_instance_admin.ListInstanceConfigsResponse]]: - r"""Return a callable for the list instance configs method over gRPC. - - Lists the supported instance configurations for a - given project. - Returns both Google-managed configurations and - user-managed configurations. - - Returns: - Callable[[~.ListInstanceConfigsRequest], - Awaitable[~.ListInstanceConfigsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_configs' not in self._stubs: - self._stubs['list_instance_configs'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs', - request_serializer=spanner_instance_admin.ListInstanceConfigsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigsResponse.deserialize, - ) - return self._stubs['list_instance_configs'] - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - Awaitable[spanner_instance_admin.InstanceConfig]]: - r"""Return a callable for the get instance config method over gRPC. - - Gets information about a particular instance - configuration. - - Returns: - Callable[[~.GetInstanceConfigRequest], - Awaitable[~.InstanceConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_config' not in self._stubs: - self._stubs['get_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig', - request_serializer=spanner_instance_admin.GetInstanceConfigRequest.serialize, - response_deserializer=spanner_instance_admin.InstanceConfig.deserialize, - ) - return self._stubs['get_instance_config'] - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create instance config method over gRPC. - - Creates an instance configuration and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance configuration. The - instance configuration name is assigned by the caller. If the - named instance configuration already exists, - ``CreateInstanceConfig`` returns ``ALREADY_EXISTS``. - - Immediately after the request returns: - - - The instance configuration is readable via the API, with all - requested attributes. The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. Its state is ``CREATING``. 
- - While the operation is pending: - - - Cancelling the operation renders the instance configuration - immediately unreadable via the API. - - Except for deleting the creating resource, all other attempts - to modify the instance configuration are rejected. - - Upon completion of the returned operation: - - - Instances can be created using the instance configuration. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. Its state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``<instance_config_name>/operations/<operation_id>`` and - can be used to track creation of the instance configuration. The - metadata field type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.create`` - permission on the resource - [parent][google.spanner.admin.instance.v1.CreateInstanceConfigRequest.parent]. - - Returns: - Callable[[~.CreateInstanceConfigRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance_config' not in self._stubs: - self._stubs['create_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstanceConfig', - request_serializer=spanner_instance_admin.CreateInstanceConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance_config'] - - @property - def update_instance_config(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceConfigRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance config method over gRPC. - - Updates an instance configuration. The returned long-running - operation can be used to track the progress of updating the - instance. If the named instance configuration does not exist, - returns ``NOT_FOUND``. - - Only user-managed configurations can be updated. - - Immediately after the request returns: - - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field is set to true. - - While the operation is pending: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata.cancel_time]. - The operation is guaranteed to succeed at undoing all changes, - after which point it terminates with a ``CANCELLED`` status. - - All other attempts to modify the instance configuration are - rejected. - - Reading the instance configuration via the API continues to - give the pre-request values. - - Upon completion of the returned operation: - - - Creating instances using the instance configuration uses the - new values. - - The new values of the instance configuration are readable via - the API. - - The instance configuration's - [reconciling][google.spanner.admin.instance.v1.InstanceConfig.reconciling] - field becomes false. - - The returned long-running operation will have a name of the - format ``<instance_config_name>/operations/<operation_id>`` and - can be used to track the instance configuration modification.
- The metadata field type is - [UpdateInstanceConfigMetadata][google.spanner.admin.instance.v1.UpdateInstanceConfigMetadata]. - The response field type is - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig], - if successful. - - Authorization requires ``spanner.instanceConfigs.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - Returns: - Callable[[~.UpdateInstanceConfigRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance_config' not in self._stubs: - self._stubs['update_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstanceConfig', - request_serializer=spanner_instance_admin.UpdateInstanceConfigRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance_config'] - - @property - def delete_instance_config(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceConfigRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete instance config method over gRPC. - - Deletes the instance configuration. Deletion is only allowed - when no instances are using the configuration. If any instances - are using the configuration, returns ``FAILED_PRECONDITION``. - - Only user-managed configurations can be deleted. - - Authorization requires ``spanner.instanceConfigs.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstanceConfig.name]. - - Returns: - Callable[[~.DeleteInstanceConfigRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance_config' not in self._stubs: - self._stubs['delete_instance_config'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstanceConfig', - request_serializer=spanner_instance_admin.DeleteInstanceConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance_config'] - - @property - def list_instance_config_operations(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigOperationsRequest], - Awaitable[spanner_instance_admin.ListInstanceConfigOperationsResponse]]: - r"""Return a callable for the list instance config - operations method over gRPC. - - Lists the user-managed instance configuration long-running - operations in the given project. An instance configuration - operation has a name of the form - ``projects/<project>/instanceConfigs/<instance_config>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation.
- - Returns: - Callable[[~.ListInstanceConfigOperationsRequest], - Awaitable[~.ListInstanceConfigOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_config_operations' not in self._stubs: - self._stubs['list_instance_config_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigOperations', - request_serializer=spanner_instance_admin.ListInstanceConfigOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstanceConfigOperationsResponse.deserialize, - ) - return self._stubs['list_instance_config_operations'] - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - Awaitable[spanner_instance_admin.ListInstancesResponse]]: - r"""Return a callable for the list instances method over gRPC. - - Lists all instances in the given project. - - Returns: - Callable[[~.ListInstancesRequest], - Awaitable[~.ListInstancesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances', - request_serializer=spanner_instance_admin.ListInstancesRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - Awaitable[spanner_instance_admin.ListInstancePartitionsResponse]]: - r"""Return a callable for the list instance partitions method over gRPC. - - Lists all instance partitions for the given instance. - - Returns: - Callable[[~.ListInstancePartitionsRequest], - Awaitable[~.ListInstancePartitionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partitions' not in self._stubs: - self._stubs['list_instance_partitions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitions', - request_serializer=spanner_instance_admin.ListInstancePartitionsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionsResponse.deserialize, - ) - return self._stubs['list_instance_partitions'] - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - Awaitable[spanner_instance_admin.Instance]]: - r"""Return a callable for the get instance method over gRPC. - - Gets information about a particular instance. - - Returns: - Callable[[~.GetInstanceRequest], - Awaitable[~.Instance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance', - request_serializer=spanner_instance_admin.GetInstanceRequest.serialize, - response_deserializer=spanner_instance_admin.Instance.deserialize, - ) - return self._stubs['get_instance'] - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create instance method over gRPC. - - Creates an instance and begins preparing it to begin serving. - The returned long-running operation can be used to track the - progress of preparing the new instance. The instance name is - assigned by the caller. If the named instance already exists, - ``CreateInstance`` returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance is readable via the API, with all requested - attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance immediately - unreadable via the API. - - The instance can be deleted. - - All other attempts to modify the instance are rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can be created in the instance. - - The instance's allocated resource levels are readable via the - API. - - The instance's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``<instance_name>/operations/<operation_id>`` and can be - used to track creation of the instance. The metadata field type - is - [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Returns: - Callable[[~.CreateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance', - request_serializer=spanner_instance_admin.CreateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance'] - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance method over gRPC. - - Updates an instance, and begins allocating or releasing - resources as requested. The returned long-running operation can - be used to track the progress of updating the instance. If the - named instance does not exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance's - allocation has been requested, billing is based on the - newly-requested level.
- - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance are rejected. - - Reading the instance via the API continues to give the - pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance's tables. - - The instance's new resource levels are readable via the API. - - The returned long-running operation will have a name of the - format ``<instance_name>/operations/<operation_id>`` and can be - used to track the instance modification. The metadata field type - is - [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. - The response field type is - [Instance][google.spanner.admin.instance.v1.Instance], if - successful. - - Authorization requires ``spanner.instances.update`` permission - on the resource - [name][google.spanner.admin.instance.v1.Instance.name]. - - Returns: - Callable[[~.UpdateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance', - request_serializer=spanner_instance_admin.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete instance method over gRPC. - - Deletes an instance. - - Immediately upon completion of the request: - - - Billing ceases for all of the instance's reserved resources. - - Soon afterward: - - - The instance and *all of its databases* immediately and - irrevocably disappear from the API. All data in the databases - is permanently deleted. - - Returns: - Callable[[~.DeleteInstanceRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance', - request_serializer=spanner_instance_admin.DeleteInstanceRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on an instance resource.
Replaces - any existing policy. - - Authorization requires ``spanner.instances.setIamPolicy`` on - [resource][google.iam.v1.SetIamPolicyRequest.resource]. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for an instance resource. Returns - an empty policy if an instance exists but does not have a policy - set. - - Authorization requires ``spanner.instances.getIamPolicy`` on - [resource][google.iam.v1.GetIamPolicyRequest.resource]. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that the caller has on the specified - instance resource. - - Attempting this RPC on a non-existent Cloud Spanner instance - resource will result in a NOT_FOUND error if the user has - ``spanner.instances.list`` permission on the containing Google - Cloud Project. Otherwise returns an empty set of permissions. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
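A hedged usage sketch for the IAM methods described above, assuming an already-constructed async transport instance; the resource path and helper name are placeholders, not part of this library:

    from google.iam.v1 import iam_policy_pb2


    async def can_update_instance(transport, resource: str) -> bool:
        # Build a permission-check request for the instance resource.
        request = iam_policy_pb2.TestIamPermissionsRequest(
            resource=resource,
            permissions=['spanner.instances.update'],
        )
        # The async stub returns an awaitable call object; awaiting it
        # yields the deserialized TestIamPermissionsResponse.
        response = await transport.test_iam_permissions(request)
        # The response echoes back only the permissions the caller holds.
        return 'spanner.instances.update' in response.permissions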
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - Awaitable[spanner_instance_admin.InstancePartition]]: - r"""Return a callable for the get instance partition method over gRPC. - - Gets information about a particular instance - partition. - - Returns: - Callable[[~.GetInstancePartitionRequest], - Awaitable[~.InstancePartition]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance_partition' not in self._stubs: - self._stubs['get_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/GetInstancePartition', - request_serializer=spanner_instance_admin.GetInstancePartitionRequest.serialize, - response_deserializer=spanner_instance_admin.InstancePartition.deserialize, - ) - return self._stubs['get_instance_partition'] - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create instance partition method over gRPC. - - Creates an instance partition and begins preparing it to be - used. The returned long-running operation can be used to track - the progress of preparing the new instance partition. The - instance partition name is assigned by the caller. If the named - instance partition already exists, ``CreateInstancePartition`` - returns ``ALREADY_EXISTS``. - - Immediately upon completion of this request: - - - The instance partition is readable via the API, with all - requested attributes but no allocated resources. Its state is - ``CREATING``. - - Until completion of the returned operation: - - - Cancelling the operation renders the instance partition - immediately unreadable via the API. - - The instance partition can be deleted. - - All other attempts to modify the instance partition are - rejected. - - Upon completion of the returned operation: - - - Billing for all successfully-allocated resources begins (some - types may have lower than the requested levels). - - Databases can start using this instance partition. - - The instance partition's allocated resource levels are - readable via the API. - - The instance partition's state becomes ``READY``. - - The returned long-running operation will have a name of the - format ``<instance_partition_name>/operations/<operation_id>`` - and can be used to track creation of the instance partition. The - metadata field type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Returns: - Callable[[~.CreateInstancePartitionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request.
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_instance_partition' not in self._stubs: - self._stubs['create_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstancePartition', - request_serializer=spanner_instance_admin.CreateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_instance_partition'] - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete instance partition method over gRPC. - - Deletes an existing instance partition. Requires that the - instance partition is not used by any database or backup and is - not the default instance partition of an instance. - - Authorization requires ``spanner.instancePartitions.delete`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.DeleteInstancePartitionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_instance_partition' not in self._stubs: - self._stubs['delete_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstancePartition', - request_serializer=spanner_instance_admin.DeleteInstancePartitionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_instance_partition'] - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance partition method over gRPC. - - Updates an instance partition, and begins allocating or - releasing resources as requested. The returned long-running - operation can be used to track the progress of updating the - instance partition. If the named instance partition does not - exist, returns ``NOT_FOUND``. - - Immediately upon completion of this request: - - - For resource types for which a decrease in the instance - partition's allocation has been requested, billing is based on - the newly-requested level. - - Until completion of the returned operation: - - - Cancelling the operation sets its metadata's - [cancel_time][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata.cancel_time], - and begins restoring resources to their pre-request values. - The operation is guaranteed to succeed at undoing all resource - changes, after which point it terminates with a ``CANCELLED`` - status. - - All other attempts to modify the instance partition are - rejected. - - Reading the instance partition via the API continues to give - the pre-request resource levels. - - Upon completion of the returned operation: - - - Billing begins for all successfully-allocated resources (some - types may have lower than the requested levels). - - All newly-reserved resources are available for serving the - instance partition's tables. - - The instance partition's new resource levels are readable via - the API. 
- - The returned long-running operation will have a name of the - format ``<instance_partition_name>/operations/<operation_id>`` - and can be used to track the instance partition modification. - The metadata field type is - [UpdateInstancePartitionMetadata][google.spanner.admin.instance.v1.UpdateInstancePartitionMetadata]. - The response field type is - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition], - if successful. - - Authorization requires ``spanner.instancePartitions.update`` - permission on the resource - [name][google.spanner.admin.instance.v1.InstancePartition.name]. - - Returns: - Callable[[~.UpdateInstancePartitionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance_partition' not in self._stubs: - self._stubs['update_instance_partition'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstancePartition', - request_serializer=spanner_instance_admin.UpdateInstancePartitionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance_partition'] - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - Awaitable[spanner_instance_admin.ListInstancePartitionOperationsResponse]]: - r"""Return a callable for the list instance partition - operations method over gRPC. - - Lists instance partition long-running operations in the given - instance. An instance partition operation has a name of the form - ``projects/<project>/instances/<instance>/instancePartitions/<instance_partition>/operations/<operation>``. - The long-running operation metadata field type - ``metadata.type_url`` describes the type of the metadata. - Operations returned include those that have - completed/failed/canceled within the last 7 days, and pending - operations. Operations returned are ordered by - ``operation.metadata.value.start_time`` in descending order - starting from the most recently started operation. - - Authorization requires - ``spanner.instancePartitionOperations.list`` permission on the - resource - [parent][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.parent]. - - Returns: - Callable[[~.ListInstancePartitionOperationsRequest], - Awaitable[~.ListInstancePartitionOperationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instance_partition_operations' not in self._stubs: - self._stubs['list_instance_partition_operations'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/ListInstancePartitionOperations', - request_serializer=spanner_instance_admin.ListInstancePartitionOperationsRequest.serialize, - response_deserializer=spanner_instance_admin.ListInstancePartitionOperationsResponse.deserialize, - ) - return self._stubs['list_instance_partition_operations'] - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the move instance method over gRPC.
-
-        Moves an instance to the target instance configuration. You can
-        use the returned long-running operation to track the progress of
-        moving the instance.
-
-        ``MoveInstance`` returns ``FAILED_PRECONDITION`` if the instance
-        meets any of the following criteria:
-
-        -  Is undergoing a move to a different instance configuration
-        -  Has backups
-        -  Has an ongoing update
-        -  Contains any CMEK-enabled databases
-        -  Is a free trial instance
-
-        While the operation is pending:
-
-        -  All other attempts to modify the instance, including changes
-           to its compute capacity, are rejected.
-
-        -  The following database and backup admin operations are
-           rejected:
-
-           -  ``DatabaseAdmin.CreateDatabase``
-           -  ``DatabaseAdmin.UpdateDatabaseDdl`` (disabled if
-              default_leader is specified in the request.)
-           -  ``DatabaseAdmin.RestoreDatabase``
-           -  ``DatabaseAdmin.CreateBackup``
-           -  ``DatabaseAdmin.CopyBackup``
-
-        -  Both the source and target instance configurations are subject
-           to hourly compute and storage charges.
-
-        -  The instance might experience higher read-write latencies and
-           a higher transaction abort rate. However, moving an instance
-           doesn't cause any downtime.
-
-        The returned long-running operation has a name of the format
-        ``<instance_name>/operations/<operation_id>`` and can be used to
-        track the move instance operation. The metadata field type is
-        [MoveInstanceMetadata][google.spanner.admin.instance.v1.MoveInstanceMetadata].
-        The response field type is
-        [Instance][google.spanner.admin.instance.v1.Instance], if
-        successful. Cancelling the operation sets its metadata's
-        [cancel_time][google.spanner.admin.instance.v1.MoveInstanceMetadata.cancel_time].
-        Cancellation is not immediate because it involves moving any
-        data previously moved to the target instance configuration back
-        to the original instance configuration. You can use this
-        operation to track the progress of the cancellation. Upon
-        successful completion of the cancellation, the operation
-        terminates with ``CANCELLED`` status.
-
-        If not cancelled, upon completion of the returned operation:
-
-        -  The instance successfully moves to the target instance
-           configuration.
-        -  You are billed for compute and storage in target instance
-           configuration.
-
-        Authorization requires the ``spanner.instances.update``
-        permission on the resource
-        [instance][google.spanner.admin.instance.v1.Instance].
-
-        For more details, see `Move an
-        instance `__.
-
-        Returns:
-            Callable[[~.MoveInstanceRequest],
-                    Awaitable[~.Operation]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
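A minimal sketch of starting and then cancelling a ``MoveInstance`` operation, assuming an initialized ``InstanceAdminAsyncClient`` named ``client`` (the resource names are illustrative):

.. code-block:: python

    # Usage sketch, not part of the generated transport.
    from google.cloud import spanner_admin_instance_v1

    async def move_and_cancel(client) -> None:
        request = spanner_admin_instance_v1.MoveInstanceRequest(
            name="projects/my-project/instances/my-instance",
            target_config="projects/my-project/instanceConfigs/nam3",
        )
        operation = await client.move_instance(request=request)
        # Cancellation is itself long-running: data already copied to the
        # target configuration is moved back before the LRO terminates
        # with CANCELLED status.
        await operation.cancel()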
- if 'move_instance' not in self._stubs: - self._stubs['move_instance'] = self._logged_channel.unary_unary( - '/google.spanner.admin.instance.v1.InstanceAdmin/MoveInstance', - request_serializer=spanner_instance_admin.MoveInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['move_instance'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_instance_configs: self._wrap_method( - self.list_instance_configs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.get_instance_config: self._wrap_method( - self.get_instance_config, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_instance_config: self._wrap_method( - self.create_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_config: self._wrap_method( - self.update_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_config: self._wrap_method( - self.delete_instance_config, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_config_operations: self._wrap_method( - self.list_instance_config_operations, - default_timeout=None, - client_info=client_info, - ), - self.list_instances: self._wrap_method( - self.list_instances, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.list_instance_partitions: self._wrap_method( - self.list_instance_partitions, - default_timeout=None, - client_info=client_info, - ), - self.get_instance: self._wrap_method( - self.get_instance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.create_instance: self._wrap_method( - self.create_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.update_instance: self._wrap_method( - self.update_instance, - default_timeout=3600.0, - client_info=client_info, - ), - self.delete_instance: self._wrap_method( - self.delete_instance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=30.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=32.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=30.0, - client_info=client_info, - ), - self.get_instance_partition: self._wrap_method( - self.get_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.create_instance_partition: self._wrap_method( - self.create_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.delete_instance_partition: self._wrap_method( - self.delete_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.update_instance_partition: self._wrap_method( - self.update_instance_partition, - default_timeout=None, - client_info=client_info, - ), - self.list_instance_partition_operations: self._wrap_method( - self.list_instance_partition_operations, - default_timeout=None, - client_info=client_info, - ), - self.move_instance: self._wrap_method( - self.move_instance, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
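The per-method defaults registered in ``_prep_wrapped_messages`` above can be overridden on any single call. A sketch mirroring the same retry predicate, assuming an initialized ``InstanceAdminAsyncClient`` named ``client`` and a recent ``google-api-core`` that exposes ``AsyncRetry`` via the ``retry`` module (the instance name is illustrative):

.. code-block:: python

    # Override the default retry/timeout for a single get_instance call.
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    async def get_with_custom_retry(client):
        custom_retry = retries.AsyncRetry(
            initial=1.0,       # first backoff, in seconds
            maximum=32.0,      # backoff ceiling
            multiplier=1.3,    # exponential growth factor
            deadline=600.0,    # overall retry budget
            predicate=retries.if_exception_type(
                core_exceptions.DeadlineExceeded,
                core_exceptions.ServiceUnavailable,
            ),
        )
        return await client.get_instance(
            name="projects/my-project/instances/my-instance",
            retry=custom_retry,
            timeout=600.0,
        )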
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'InstanceAdminGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py deleted file mode 100644 index 1d9124d35d..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py +++ /dev/null @@ -1,4462 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 -import google.protobuf - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseInstanceAdminRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER - DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ - - -class InstanceAdminRestInterceptor: - """Interceptor for InstanceAdmin. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the InstanceAdminRestTransport. - - .. 
code-block:: python - class MyCustomInstanceAdminInterceptor(InstanceAdminRestInterceptor): - def pre_create_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_instance_partition(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_instance_partition(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_config_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_config_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_configs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_configs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_partition_operations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_partition_operations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instance_partitions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instance_partitions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_instances(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_instances(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_move_instance(self, request, metadata): 
- logging.log(f"Received request: {request}") - return request, metadata - - def post_move_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_set_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_set_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_test_iam_permissions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_test_iam_permissions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance_config(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance_config(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_instance_partition(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_instance_partition(self, response): - logging.log(f"Received response: {response}") - return response - - transport = InstanceAdminRestTransport(interceptor=MyCustomInstanceAdminInterceptor()) - client = InstanceAdminClient(transport=transport) - - - """ - def pre_create_instance(self, request: spanner_instance_admin.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance - - DEPRECATED. Please use the `post_create_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_create_instance` interceptor runs - before the `post_create_instance_with_metadata` interceptor. - """ - return response - - def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_create_instance_with_metadata` - interceptor in new development instead of the `post_create_instance` interceptor. - When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the - `post_create_instance` interceptor. The (possibly modified) response returned by - `post_create_instance` will be passed to - `post_create_instance_with_metadata`. 
- """ - return response, metadata - - def pre_create_instance_config(self, request: spanner_instance_admin.CreateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_create_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance_config - - DEPRECATED. Please use the `post_create_instance_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_create_instance_config` interceptor runs - before the `post_create_instance_config_with_metadata` interceptor. - """ - return response - - def post_create_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_instance_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_create_instance_config_with_metadata` - interceptor in new development instead of the `post_create_instance_config` interceptor. - When both interceptors are used, this `post_create_instance_config_with_metadata` interceptor runs after the - `post_create_instance_config` interceptor. The (possibly modified) response returned by - `post_create_instance_config` will be passed to - `post_create_instance_config_with_metadata`. - """ - return response, metadata - - def pre_create_instance_partition(self, request: spanner_instance_admin.CreateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.CreateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_create_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_instance_partition - - DEPRECATED. Please use the `post_create_instance_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_create_instance_partition` interceptor runs - before the `post_create_instance_partition_with_metadata` interceptor. - """ - return response - - def post_create_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_instance_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. 
- - We recommend only using this `post_create_instance_partition_with_metadata` - interceptor in new development instead of the `post_create_instance_partition` interceptor. - When both interceptors are used, this `post_create_instance_partition_with_metadata` interceptor runs after the - `post_create_instance_partition` interceptor. The (possibly modified) response returned by - `post_create_instance_partition` will be passed to - `post_create_instance_partition_with_metadata`. - """ - return response, metadata - - def pre_delete_instance(self, request: spanner_instance_admin.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def pre_delete_instance_config(self, request: spanner_instance_admin.DeleteInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def pre_delete_instance_partition(self, request: spanner_instance_admin.DeleteInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.DeleteInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - DEPRECATED. Please use the `post_get_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_iam_policy` interceptor runs - before the `post_get_iam_policy_with_metadata` interceptor. - """ - return response - - def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_get_iam_policy_with_metadata` - interceptor in new development instead of the `post_get_iam_policy` interceptor. - When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the - `post_get_iam_policy` interceptor. 
The (possibly modified) response returned by - `post_get_iam_policy` will be passed to - `post_get_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_get_instance(self, request: spanner_instance_admin.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_instance(self, response: spanner_instance_admin.Instance) -> spanner_instance_admin.Instance: - """Post-rpc interceptor for get_instance - - DEPRECATED. Please use the `post_get_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_instance` interceptor runs - before the `post_get_instance_with_metadata` interceptor. - """ - return response - - def post_get_instance_with_metadata(self, response: spanner_instance_admin.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_get_instance_with_metadata` - interceptor in new development instead of the `post_get_instance` interceptor. - When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the - `post_get_instance` interceptor. The (possibly modified) response returned by - `post_get_instance` will be passed to - `post_get_instance_with_metadata`. - """ - return response, metadata - - def pre_get_instance_config(self, request: spanner_instance_admin.GetInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_instance_config(self, response: spanner_instance_admin.InstanceConfig) -> spanner_instance_admin.InstanceConfig: - """Post-rpc interceptor for get_instance_config - - DEPRECATED. Please use the `post_get_instance_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_instance_config` interceptor runs - before the `post_get_instance_config_with_metadata` interceptor. - """ - return response - - def post_get_instance_config_with_metadata(self, response: spanner_instance_admin.InstanceConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstanceConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_instance_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. 
- - We recommend only using this `post_get_instance_config_with_metadata` - interceptor in new development instead of the `post_get_instance_config` interceptor. - When both interceptors are used, this `post_get_instance_config_with_metadata` interceptor runs after the - `post_get_instance_config` interceptor. The (possibly modified) response returned by - `post_get_instance_config` will be passed to - `post_get_instance_config_with_metadata`. - """ - return response, metadata - - def pre_get_instance_partition(self, request: spanner_instance_admin.GetInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.GetInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_instance_partition(self, response: spanner_instance_admin.InstancePartition) -> spanner_instance_admin.InstancePartition: - """Post-rpc interceptor for get_instance_partition - - DEPRECATED. Please use the `post_get_instance_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_get_instance_partition` interceptor runs - before the `post_get_instance_partition_with_metadata` interceptor. - """ - return response - - def post_get_instance_partition_with_metadata(self, response: spanner_instance_admin.InstancePartition, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.InstancePartition, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_instance_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_get_instance_partition_with_metadata` - interceptor in new development instead of the `post_get_instance_partition` interceptor. - When both interceptors are used, this `post_get_instance_partition_with_metadata` interceptor runs after the - `post_get_instance_partition` interceptor. The (possibly modified) response returned by - `post_get_instance_partition` will be passed to - `post_get_instance_partition_with_metadata`. - """ - return response, metadata - - def pre_list_instance_config_operations(self, request: spanner_instance_admin.ListInstanceConfigOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_config_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_config_operations(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: - """Post-rpc interceptor for list_instance_config_operations - - DEPRECATED. Please use the `post_list_instance_config_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. 
This `post_list_instance_config_operations` interceptor runs - before the `post_list_instance_config_operations_with_metadata` interceptor. - """ - return response - - def post_list_instance_config_operations_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_config_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_config_operations_with_metadata` - interceptor in new development instead of the `post_list_instance_config_operations` interceptor. - When both interceptors are used, this `post_list_instance_config_operations_with_metadata` interceptor runs after the - `post_list_instance_config_operations` interceptor. The (possibly modified) response returned by - `post_list_instance_config_operations` will be passed to - `post_list_instance_config_operations_with_metadata`. - """ - return response, metadata - - def pre_list_instance_configs(self, request: spanner_instance_admin.ListInstanceConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_configs - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_configs(self, response: spanner_instance_admin.ListInstanceConfigsResponse) -> spanner_instance_admin.ListInstanceConfigsResponse: - """Post-rpc interceptor for list_instance_configs - - DEPRECATED. Please use the `post_list_instance_configs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instance_configs` interceptor runs - before the `post_list_instance_configs_with_metadata` interceptor. - """ - return response - - def post_list_instance_configs_with_metadata(self, response: spanner_instance_admin.ListInstanceConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstanceConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_configs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_configs_with_metadata` - interceptor in new development instead of the `post_list_instance_configs` interceptor. - When both interceptors are used, this `post_list_instance_configs_with_metadata` interceptor runs after the - `post_list_instance_configs` interceptor. The (possibly modified) response returned by - `post_list_instance_configs` will be passed to - `post_list_instance_configs_with_metadata`. 
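On the client side, the paged ``ListInstanceConfigs`` responses these interceptors see are normally consumed through a pager, so user code rarely handles page tokens directly. A sketch, assuming an initialized ``InstanceAdminClient`` named ``client`` (the project name is illustrative):

.. code-block:: python

    # The pager transparently fetches subsequent pages on iteration.
    for config in client.list_instance_configs(parent="projects/my-project"):
        print(config.name)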
- """ - return response, metadata - - def pre_list_instance_partition_operations(self, request: spanner_instance_admin.ListInstancePartitionOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_partition_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_partition_operations(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: - """Post-rpc interceptor for list_instance_partition_operations - - DEPRECATED. Please use the `post_list_instance_partition_operations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instance_partition_operations` interceptor runs - before the `post_list_instance_partition_operations_with_metadata` interceptor. - """ - return response - - def post_list_instance_partition_operations_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionOperationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_partition_operations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_partition_operations_with_metadata` - interceptor in new development instead of the `post_list_instance_partition_operations` interceptor. - When both interceptors are used, this `post_list_instance_partition_operations_with_metadata` interceptor runs after the - `post_list_instance_partition_operations` interceptor. The (possibly modified) response returned by - `post_list_instance_partition_operations` will be passed to - `post_list_instance_partition_operations_with_metadata`. - """ - return response, metadata - - def pre_list_instance_partitions(self, request: spanner_instance_admin.ListInstancePartitionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instance_partitions - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instance_partitions(self, response: spanner_instance_admin.ListInstancePartitionsResponse) -> spanner_instance_admin.ListInstancePartitionsResponse: - """Post-rpc interceptor for list_instance_partitions - - DEPRECATED. Please use the `post_list_instance_partitions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instance_partitions` interceptor runs - before the `post_list_instance_partitions_with_metadata` interceptor. 
- """ - return response - - def post_list_instance_partitions_with_metadata(self, response: spanner_instance_admin.ListInstancePartitionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancePartitionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instance_partitions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instance_partitions_with_metadata` - interceptor in new development instead of the `post_list_instance_partitions` interceptor. - When both interceptors are used, this `post_list_instance_partitions_with_metadata` interceptor runs after the - `post_list_instance_partitions` interceptor. The (possibly modified) response returned by - `post_list_instance_partitions` will be passed to - `post_list_instance_partitions_with_metadata`. - """ - return response, metadata - - def pre_list_instances(self, request: spanner_instance_admin.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instances - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_instances(self, response: spanner_instance_admin.ListInstancesResponse) -> spanner_instance_admin.ListInstancesResponse: - """Post-rpc interceptor for list_instances - - DEPRECATED. Please use the `post_list_instances_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_list_instances` interceptor runs - before the `post_list_instances_with_metadata` interceptor. - """ - return response - - def post_list_instances_with_metadata(self, response: spanner_instance_admin.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instances - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_list_instances_with_metadata` - interceptor in new development instead of the `post_list_instances` interceptor. - When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the - `post_list_instances` interceptor. The (possibly modified) response returned by - `post_list_instances` will be passed to - `post_list_instances_with_metadata`. - """ - return response, metadata - - def pre_move_instance(self, request: spanner_instance_admin.MoveInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.MoveInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for move_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_move_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for move_instance - - DEPRECATED. 
Please use the `post_move_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_move_instance` interceptor runs - before the `post_move_instance_with_metadata` interceptor. - """ - return response - - def post_move_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for move_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_move_instance_with_metadata` - interceptor in new development instead of the `post_move_instance` interceptor. - When both interceptors are used, this `post_move_instance_with_metadata` interceptor runs after the - `post_move_instance` interceptor. The (possibly modified) response returned by - `post_move_instance` will be passed to - `post_move_instance_with_metadata`. - """ - return response, metadata - - def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - DEPRECATED. Please use the `post_set_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_set_iam_policy` interceptor runs - before the `post_set_iam_policy_with_metadata` interceptor. - """ - return response - - def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_set_iam_policy_with_metadata` - interceptor in new development instead of the `post_set_iam_policy` interceptor. - When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the - `post_set_iam_policy` interceptor. The (possibly modified) response returned by - `post_set_iam_policy` will be passed to - `post_set_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. 
- """ - return request, metadata - - def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_test_iam_permissions` interceptor runs - before the `post_test_iam_permissions_with_metadata` interceptor. - """ - return response - - def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_test_iam_permissions_with_metadata` - interceptor in new development instead of the `post_test_iam_permissions` interceptor. - When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the - `post_test_iam_permissions` interceptor. The (possibly modified) response returned by - `post_test_iam_permissions` will be passed to - `post_test_iam_permissions_with_metadata`. - """ - return response, metadata - - def pre_update_instance(self, request: spanner_instance_admin.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance - - DEPRECATED. Please use the `post_update_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_update_instance` interceptor runs - before the `post_update_instance_with_metadata` interceptor. - """ - return response - - def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_update_instance_with_metadata` - interceptor in new development instead of the `post_update_instance` interceptor. - When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the - `post_update_instance` interceptor. The (possibly modified) response returned by - `post_update_instance` will be passed to - `post_update_instance_with_metadata`. 
- """ - return response, metadata - - def pre_update_instance_config(self, request: spanner_instance_admin.UpdateInstanceConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstanceConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_instance_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_update_instance_config(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance_config - - DEPRECATED. Please use the `post_update_instance_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_update_instance_config` interceptor runs - before the `post_update_instance_config_with_metadata` interceptor. - """ - return response - - def post_update_instance_config_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. - - We recommend only using this `post_update_instance_config_with_metadata` - interceptor in new development instead of the `post_update_instance_config` interceptor. - When both interceptors are used, this `post_update_instance_config_with_metadata` interceptor runs after the - `post_update_instance_config` interceptor. The (possibly modified) response returned by - `post_update_instance_config` will be passed to - `post_update_instance_config_with_metadata`. - """ - return response, metadata - - def pre_update_instance_partition(self, request: spanner_instance_admin.UpdateInstancePartitionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[spanner_instance_admin.UpdateInstancePartitionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_instance_partition - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_update_instance_partition(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance_partition - - DEPRECATED. Please use the `post_update_instance_partition_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. This `post_update_instance_partition` interceptor runs - before the `post_update_instance_partition_with_metadata` interceptor. - """ - return response - - def post_update_instance_partition_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance_partition - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the InstanceAdmin server but before it is returned to user code. 
- - We recommend only using this `post_update_instance_partition_with_metadata` - interceptor in new development instead of the `post_update_instance_partition` interceptor. - When both interceptors are used, this `post_update_instance_partition_with_metadata` interceptor runs after the - `post_update_instance_partition` interceptor. The (possibly modified) response returned by - `post_update_instance_partition` will be passed to - `post_update_instance_partition_with_metadata`. - """ - return response, metadata - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the InstanceAdmin server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the InstanceAdmin server but before - it is returned to user code. 
- """ - return response - - -@dataclasses.dataclass -class InstanceAdminRestStub: - _session: AuthorizedSession - _host: str - _interceptor: InstanceAdminRestInterceptor - - -class InstanceAdminRestTransport(_BaseInstanceAdminRestTransport): - """REST backend synchronous transport for InstanceAdmin. - - Cloud Spanner Instance Admin API - - The Cloud Spanner Instance Admin API can be used to create, - delete, modify and list instances. Instances are dedicated Cloud - Spanner serving and storage resources to be used by Cloud - Spanner databases. - - Each instance has a "configuration", which dictates where the - serving resources for the Cloud Spanner instance are located - (e.g., US-central, Europe). Configurations are created by Google - based on resource availability. - - Cloud Spanner billing is based on the instances that exist and - their sizes. After an instance exists, there are no additional - per-database or per-operation charges for use of the instance - (though there may be additional network bandwidth charges). - Instances offer isolation: problems with databases in one - instance will not affect other instances. However, within an - instance databases can affect each other. For example, if one - database in an instance receives a lot of requests and consumes - most of the instance resources, fewer resources are available - for other databases in that instance, and their performance may - suffer. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[InstanceAdminRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): Deprecated. A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. This argument will be - removed in the next major version of this library. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or InstanceAdminRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ], - 
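# Each Operations verb in this mapping (cancel/delete/get above, list below) is
# bound to every collection that can own long-running operations -- databases,
# instances, backups, instance partitions, instance configs and SSD caches --
# so the polling client can transcode any operation name this API returns.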
'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateInstance(_BaseInstanceAdminRestTransport._BaseCreateInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CreateInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create instance method over HTTP. - - Args: - request (~.spanner_instance_admin.CreateInstanceRequest): - The request object. The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_http_options() - - request, metadata = self._interceptor.pre_create_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateInstanceConfig(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CreateInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.CreateInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.CreateInstanceConfigRequest): - The request object. The request for - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_create_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CreateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
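# A minimal sketch (hypothetical subclass name and tag) of customizing the
# interceptor hooks invoked above; an instance is passed to the transport via
# its ``interceptor=`` constructor argument:
#
#     class TaggingInterceptor(InstanceAdminRestInterceptor):
#         def pre_create_instance_config(self, request, metadata):
#             # append an extra metadata header before the call is sent
#             return request, list(metadata) + [("x-example-tag", "audit")]
#
#         def post_create_instance_config_with_metadata(self, response, metadata):
#             # preferred over the deprecated plain post_* hook
#             return response, metadata
#
#     transport = InstanceAdminRestTransport(interceptor=TaggingInterceptor())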
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_instance_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_config", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstanceConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateInstancePartition(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CreateInstancePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.CreateInstancePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create instance partition method over HTTP. - - Args: - request (~.spanner_instance_admin.CreateInstancePartitionRequest): - The request object. The request for - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_create_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CreateInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CreateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_instance_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_instance_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.create_instance_partition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CreateInstancePartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteInstance(_BaseInstanceAdminRestTransport._BaseDeleteInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.DeleteInstanceRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete instance method over HTTP. - - Args: - request (~.spanner_instance_admin.DeleteInstanceRequest): - The request object. The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_http_options() - - request, metadata = self._interceptor.pre_delete_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteInstanceConfig(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.DeleteInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.DeleteInstanceConfigRequest): - The request object. The request for - [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_delete_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteInstancePartition(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteInstancePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.DeleteInstancePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete instance partition method over HTTP. - - Args: - request (~.spanner_instance_admin.DeleteInstancePartitionRequest): - The request object. The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_delete_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetIamPolicy(_BaseInstanceAdminRestTransport._BaseGetIamPolicy, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. 
A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the `IAM - documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation <https://cloud.google.com/iam/docs/>`__. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetIamPolicy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
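# For illustration: ``from_http_response`` below selects the
# ``GoogleAPICallError`` subclass from the status code, e.g. a 404 response
# surfaces as ``core_exceptions.NotFound`` and a 403 as
# ``core_exceptions.PermissionDenied``. A caller that gets past the check can
# then read the parsed policy, e.g. (hypothetical resource name):
#
#     policy = client.get_iam_policy(request={"resource": instance_name})
#     for binding in policy.bindings:
#         print(binding.role, list(binding.members))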
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_iam_policy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetInstance(_BaseInstanceAdminRestTransport._BaseGetInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.Instance: - r"""Call the get instance method over HTTP. - - Args: - request (~.spanner_instance_admin.GetInstanceRequest): - The request object. The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.Instance: - An isolated set of Cloud Spanner - resources on which databases can be - hosted. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetInstance._get_http_options() - - request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.Instance() - pb_resp = spanner_instance_admin.Instance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.Instance.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetInstanceConfig(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.GetInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.InstanceConfig: - 
r"""Call the get instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.GetInstanceConfigRequest): - The request object. The request for - [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.InstanceConfig: - A possible configuration for a Cloud - Spanner instance. Configurations define - the geographic placement of nodes and - their replication. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_get_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.InstanceConfig() - pb_resp = spanner_instance_admin.InstanceConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_instance_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.InstanceConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_config", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstanceConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetInstancePartition(_BaseInstanceAdminRestTransport._BaseGetInstancePartition, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetInstancePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.GetInstancePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.InstancePartition: - r"""Call the get instance partition method over HTTP. - - Args: - request (~.spanner_instance_admin.GetInstancePartitionRequest): - The request object. The request for - [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.InstancePartition: - An isolated set of Cloud Spanner - resources that databases can define - placements on. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_get_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.InstancePartition() - pb_resp = spanner_instance_admin.InstancePartition.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_instance_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.InstancePartition.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.get_instance_partition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetInstancePartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstanceConfigOperations(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstanceConfigOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstanceConfigOperationsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstanceConfigOperationsResponse: - r"""Call the list instance config - operations method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstanceConfigOperationsRequest): - The request object. The request for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstanceConfigOperationsResponse: - The response for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_config_operations(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstanceConfigOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
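# The transport parses and returns a single page; pagination is layered on by
# the client, whose pager follows ``next_page_token`` across repeated calls,
# e.g. (hypothetical project):
#
#     for op in client.list_instance_config_operations(parent="projects/my-project"):
#         print(op.name)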
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstanceConfigOperationsResponse() - pb_resp = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_config_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_config_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_config_operations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstanceConfigs(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstanceConfigs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstanceConfigsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstanceConfigsResponse: - r"""Call the list instance configs method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstanceConfigsRequest): - The request object. The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstanceConfigsResponse: - The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_configs(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstanceConfigs", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstanceConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstanceConfigsResponse() - pb_resp = spanner_instance_admin.ListInstanceConfigsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_configs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_configs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstanceConfigsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_configs", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstanceConfigs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstancePartitionOperations(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstancePartitionOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: 
spanner_instance_admin.ListInstancePartitionOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstancePartitionOperationsResponse: - r"""Call the list instance partition - operations method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstancePartitionOperationsRequest): - The request object. The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstancePartitionOperationsResponse: - The response for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_partition_operations(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitionOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitionOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstancePartitionOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
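# A sketch of the deserialization idiom that follows the status check in
# each unary method here: create an empty response message (for proto-plus
# types, the raw protobuf is first obtained via the .pb() accessor) and
# let json_format.Parse() fill it in place. operations_pb2.Operation
# stands in for the response types; the payload below is fabricated
# purely for illustration.
from google.longrunning import operations_pb2
from google.protobuf import json_format

payload = '{"name": "operations/abc123", "done": true, "futureField": 1}'
op = operations_pb2.Operation()
# ignore_unknown_fields=True keeps an older client compatible with a
# newer server: "futureField" is not in this schema, so it is dropped
# instead of raising a ParseError.
json_format.Parse(payload, op, ignore_unknown_fields=True)
assert op.name == "operations/abc123" and op.done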
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstancePartitionOperationsResponse() - pb_resp = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_partition_operations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_partition_operations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partition_operations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitionOperations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstancePartitions(_BaseInstanceAdminRestTransport._BaseListInstancePartitions, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstancePartitions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstancePartitionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstancePartitionsResponse: - r"""Call the list instance partitions method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstancePartitionsRequest): - The request object. The request for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstancePartitionsResponse: - The response for - [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_http_options() - - request, metadata = self._interceptor.pre_list_instance_partitions(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstancePartitions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstancePartitions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstancePartitionsResponse() - pb_resp = spanner_instance_admin.ListInstancePartitionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instance_partitions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instance_partitions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstancePartitionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instance_partitions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstancePartitions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstances(_BaseInstanceAdminRestTransport._BaseListInstances, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListInstances") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: spanner_instance_admin.ListInstancesRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> spanner_instance_admin.ListInstancesResponse: - r"""Call the list instances method over HTTP. - - Args: - request (~.spanner_instance_admin.ListInstancesRequest): - The request object. The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.spanner_instance_admin.ListInstancesResponse: - The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListInstances._get_http_options() - - request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListInstances._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListInstances._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListInstances", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstances", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
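# The request-logging block above repeats a pattern worth noting: all
# JSON serialization is gated behind isEnabledFor(), so the payload dump
# costs nothing unless DEBUG logging is actually enabled. A standalone
# sketch of the same guard (logger name and field names illustrative):
import logging

_LOG = logging.getLogger("example.rest.transport")

def log_http_request(method: str, url: str, headers: dict) -> None:
    if _LOG.isEnabledFor(logging.DEBUG):
        # "extra" attaches structured fields to the LogRecord, where a
        # structured-logging handler can emit them alongside the message.
        _LOG.debug(
            "Sending request",
            extra={
                "httpRequest": {
                    "requestMethod": method,
                    "requestUrl": url,
                    "headers": headers,
                },
            },
        )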
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = spanner_instance_admin.ListInstancesResponse() - pb_resp = spanner_instance_admin.ListInstancesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instances(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = spanner_instance_admin.ListInstancesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.list_instances", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListInstances", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _MoveInstance(_BaseInstanceAdminRestTransport._BaseMoveInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.MoveInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.MoveInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the move instance method over HTTP. - - Args: - request (~.spanner_instance_admin.MoveInstanceRequest): - The request object. The request for - [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_http_options() - - request, metadata = self._interceptor.pre_move_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseMoveInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.MoveInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "MoveInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._MoveInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_move_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_move_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.move_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "MoveInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SetIamPolicy(_BaseInstanceAdminRestTransport._BaseSetIamPolicy, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the `IAM - documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation <https://cloud.google.com/iam/docs/>`__.
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.SetIamPolicy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_set_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.set_iam_policy", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TestIamPermissions(_BaseInstanceAdminRestTransport._BaseTestIamPermissions, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. - - Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.TestIamPermissions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
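# Each RPC in this transport is bracketed by interceptor hooks: a pre_*
# hook may rewrite the request and metadata before transcoding, and the
# post_* / post_*_with_metadata hooks may rewrite the decoded response
# afterwards. A schematic of that contract (class and method names are
# illustrative, not the generated InstanceAdminRestInterceptor API):
from typing import Any, Sequence, Tuple

class TracingInterceptor:
    def pre_call(self, request: Any, metadata: Sequence[Tuple[str, str]]):
        # The returned pair replaces the originals, so a hook can inject
        # headers or rewrite the request wholesale.
        return request, list(metadata) + [("x-example-trace", "1")]

    def post_call(self, response: Any) -> Any:
        # Whatever is returned here is what the caller ultimately sees.
        return response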
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_test_iam_permissions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.test_iam_permissions", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInstance(_BaseInstanceAdminRestTransport._BaseUpdateInstance, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.UpdateInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update instance method over HTTP. - - Args: - request (~.spanner_instance_admin.UpdateInstanceRequest): - The request object. The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_http_options() - - request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInstanceConfig(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.UpdateInstanceConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.UpdateInstanceConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update instance config method over HTTP. - - Args: - request (~.spanner_instance_admin.UpdateInstanceConfigRequest): - The request object. The request for - [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_http_options() - - request, metadata = self._interceptor.pre_update_instance_config(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstanceConfig", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstanceConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._UpdateInstanceConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
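# UpdateInstanceConfig, like the other mutating RPCs in this file, returns
# a google.longrunning Operation rather than the finished resource. At the
# public client layer that surfaces as a future-like object. A usage
# sketch, assuming default credentials and a properly populated request:
from google.cloud import spanner_admin_instance_v1

client = spanner_admin_instance_v1.InstanceAdminClient()
operation = client.update_instance_config(
    request=spanner_admin_instance_v1.UpdateInstanceConfigRequest()
)
# result() polls the operation until it completes, then returns the
# unpacked InstanceConfig or raises the operation's stored error.
config = operation.result(timeout=300)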
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_instance_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_config", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstanceConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInstancePartition(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.UpdateInstancePartition") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: spanner_instance_admin.UpdateInstancePartitionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update instance partition method over HTTP. - - Args: - request (~.spanner_instance_admin.UpdateInstancePartitionRequest): - The request object. The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_http_options() - - request, metadata = self._interceptor.pre_update_instance_partition(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_transcoded_request(http_options, request) - - body = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.UpdateInstancePartition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstancePartition", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._UpdateInstancePartition._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_instance_partition(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_partition_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminClient.update_instance_partition", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "UpdateInstancePartition", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_instance(self) -> Callable[ - [spanner_instance_admin.CreateInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_instance_config(self) -> Callable[ - [spanner_instance_admin.CreateInstanceConfigRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_instance_partition(self) -> Callable[ - [spanner_instance_admin.CreateInstancePartitionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance_config(self) -> Callable[ - [spanner_instance_admin.DeleteInstanceConfigRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_instance_partition(self) -> Callable[ - [spanner_instance_admin.DeleteInstancePartitionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance(self) -> Callable[ - [spanner_instance_admin.GetInstanceRequest], - spanner_instance_admin.Instance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance_config(self) -> Callable[ - [spanner_instance_admin.GetInstanceConfigRequest], - spanner_instance_admin.InstanceConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance_partition(self) -> Callable[ - [spanner_instance_admin.GetInstancePartitionRequest], - spanner_instance_admin.InstancePartition]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_config_operations(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigOperationsRequest], - spanner_instance_admin.ListInstanceConfigOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListInstanceConfigOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_configs(self) -> Callable[ - [spanner_instance_admin.ListInstanceConfigsRequest], - spanner_instance_admin.ListInstanceConfigsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstanceConfigs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_partition_operations(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionOperationsRequest], - spanner_instance_admin.ListInstancePartitionOperationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstancePartitionOperations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instance_partitions(self) -> Callable[ - [spanner_instance_admin.ListInstancePartitionsRequest], - spanner_instance_admin.ListInstancePartitionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstancePartitions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instances(self) -> Callable[ - [spanner_instance_admin.ListInstancesRequest], - spanner_instance_admin.ListInstancesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore - - @property - def move_instance(self) -> Callable[ - [spanner_instance_admin.MoveInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._MoveInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
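# The properties in this block share one dispatch pattern: each is
# annotated as returning a Callable, but actually returns a per-RPC stub
# instance whose __call__ performs the HTTP round trip; the generated
# stubs also mix in transport base classes, which is why each return line
# carries a "# type: ignore". A minimal sketch of the shape (names are
# illustrative):
from typing import Callable

class _EchoStub:
    def __call__(self, request: str) -> str:
        # Stand-in for transcoding + HTTP round trip + response parsing.
        return request

class MiniTransport:
    @property
    def echo(self) -> Callable[[str], str]:
        # An instance defining __call__ satisfies the Callable annotation.
        return _EchoStub()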
- # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance_config(self) -> Callable[ - [spanner_instance_admin.UpdateInstanceConfigRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInstanceConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance_partition(self) -> Callable[ - [spanner_instance_admin.UpdateInstancePartitionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInstancePartition(self._session, self._host, self._interceptor) # type: ignore - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseInstanceAdminRestTransport._BaseCancelOperation, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.CancelOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseInstanceAdminRestTransport._BaseDeleteOperation, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.DeleteOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseInstanceAdminRestTransport._BaseGetOperation, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseInstanceAdminRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminAsyncClient.GetOperation", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseInstanceAdminRestTransport._BaseListOperations, InstanceAdminRestStub): - def __hash__(self): - return hash("InstanceAdminRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. 
- - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseInstanceAdminRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseInstanceAdminRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseInstanceAdminRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.spanner.admin.instance_v1.InstanceAdminClient.ListOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = InstanceAdminRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.spanner.admin.instance_v1.InstanceAdminAsyncClient.ListOperations", - extra = { - "serviceName": "google.spanner.admin.instance.v1.InstanceAdmin", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'InstanceAdminRestTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py deleted file mode 100644 index ef9a29eaf8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest_base.py +++ /dev/null @@ -1,1152 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import InstanceAdminTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseInstanceAdminRestTransport(InstanceAdminTransport): - """Base REST backend transport for InstanceAdmin. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'spanner.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'spanner.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/instances', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.CreateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in
message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/instanceConfigs', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.CreateInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.CreateInstancePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseCreateInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.DeleteInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.DeleteInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.DeleteInstancePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseDeleteInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*}:getIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - 
transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.GetInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.GetInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.GetInstancePartitionRequest.pb(request) - 
transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseGetInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstanceConfigOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/instanceConfigOperations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstanceConfigs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/instanceConfigs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstanceConfigsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstanceConfigs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstancePartitionOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitionOperations', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitionOperations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstancePartitions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/instances/*}/instancePartitions', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstancePartitionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstancePartitions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstances: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/instances', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.ListInstancesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseListInstances._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseMoveInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{name=projects/*/instances/*}:move', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.MoveInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseMoveInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/instances/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseInstanceAdminRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/instances/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.UpdateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInstanceConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance_config.name=projects/*/instanceConfigs/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.UpdateInstanceConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstanceConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInstancePartition: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': 
'/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = spanner_instance_admin.UpdateInstancePartitionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseInstanceAdminRestTransport._BaseUpdateInstancePartition._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}:cancel', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def 
__hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/databases/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/backups/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instances/*/instancePartitions/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/instanceConfigs/*/ssdCaches/*/operations}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseInstanceAdminRestTransport', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py deleted file mode 100644 index 385c06fb99..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .common import ( - OperationProgress, - ReplicaSelection, - FulfillmentPeriod, -) -from .spanner_instance_admin import ( - AutoscalingConfig, - CreateInstanceConfigMetadata, - CreateInstanceConfigRequest, - CreateInstanceMetadata, - CreateInstancePartitionMetadata, - CreateInstancePartitionRequest, - CreateInstanceRequest, - DeleteInstanceConfigRequest, - DeleteInstancePartitionRequest, - DeleteInstanceRequest, - FreeInstanceMetadata, - GetInstanceConfigRequest, - GetInstancePartitionRequest, - GetInstanceRequest, - Instance, - InstanceConfig, - InstancePartition, - ListInstanceConfigOperationsRequest, - ListInstanceConfigOperationsResponse, - ListInstanceConfigsRequest, - ListInstanceConfigsResponse, - ListInstancePartitionOperationsRequest, - ListInstancePartitionOperationsResponse, - ListInstancePartitionsRequest, - ListInstancePartitionsResponse, - ListInstancesRequest, - ListInstancesResponse, - MoveInstanceMetadata, - MoveInstanceRequest, - MoveInstanceResponse, - ReplicaComputeCapacity, - ReplicaInfo, - UpdateInstanceConfigMetadata, - UpdateInstanceConfigRequest, - UpdateInstanceMetadata, - UpdateInstancePartitionMetadata, - UpdateInstancePartitionRequest, - UpdateInstanceRequest, -) - -__all__ = ( - 'OperationProgress', - 'ReplicaSelection', - 'FulfillmentPeriod', - 'AutoscalingConfig', - 'CreateInstanceConfigMetadata', - 'CreateInstanceConfigRequest', - 'CreateInstanceMetadata', - 'CreateInstancePartitionMetadata', - 'CreateInstancePartitionRequest', - 'CreateInstanceRequest', - 'DeleteInstanceConfigRequest', - 'DeleteInstancePartitionRequest', - 'DeleteInstanceRequest', - 'FreeInstanceMetadata', - 'GetInstanceConfigRequest', - 'GetInstancePartitionRequest', - 'GetInstanceRequest', - 'Instance', - 'InstanceConfig', - 'InstancePartition', - 'ListInstanceConfigOperationsRequest', - 'ListInstanceConfigOperationsResponse', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'ListInstancePartitionOperationsRequest', - 'ListInstancePartitionOperationsResponse', - 'ListInstancePartitionsRequest', - 'ListInstancePartitionsResponse', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'MoveInstanceMetadata', - 'MoveInstanceRequest', - 'MoveInstanceResponse', - 'ReplicaComputeCapacity', - 'ReplicaInfo', - 'UpdateInstanceConfigMetadata', - 'UpdateInstanceConfigRequest', - 'UpdateInstanceMetadata', - 'UpdateInstancePartitionMetadata', - 'UpdateInstancePartitionRequest', - 'UpdateInstanceRequest', -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py deleted file mode 100644 index 5bc1c6b158..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/common.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - from __future__ import annotations - - from typing import MutableMapping, MutableSequence - - import proto # type: ignore - - from google.protobuf import timestamp_pb2 # type: ignore - - - __protobuf__ = proto.module( - package='google.spanner.admin.instance.v1', - manifest={ - 'FulfillmentPeriod', - 'OperationProgress', - 'ReplicaSelection', - }, - ) - - - class FulfillmentPeriod(proto.Enum): - r"""Indicates the expected fulfillment period of an operation. - - Values: - FULFILLMENT_PERIOD_UNSPECIFIED (0): - Not specified. - FULFILLMENT_PERIOD_NORMAL (1): - Normal fulfillment period. The operation is - expected to complete within minutes. - FULFILLMENT_PERIOD_EXTENDED (2): - Extended fulfillment period. It can take up - to an hour for the operation to complete. - """ - FULFILLMENT_PERIOD_UNSPECIFIED = 0 - FULFILLMENT_PERIOD_NORMAL = 1 - FULFILLMENT_PERIOD_EXTENDED = 2 - - - class OperationProgress(proto.Message): - r"""Encapsulates progress-related information for Cloud Spanner - long-running instance operations. - - Attributes: - progress_percent (int): - Percent completion of the operation. - Values are between 0 and 100 inclusive. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time the request was received. - end_time (google.protobuf.timestamp_pb2.Timestamp): - If set, the time at which this operation - failed or was completed successfully. - """ - - progress_percent: int = proto.Field( - proto.INT32, - number=1, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - - class ReplicaSelection(proto.Message): - r"""ReplicaSelection identifies replicas with common properties. - - Attributes: - location (str): - Required. Name of the location of the - replicas (e.g., "us-central1"). - """ - - location: str = proto.Field( - proto.STRING, - number=1, - ) - - - __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py deleted file mode 100644 index d73305b19f..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py +++ /dev/null @@ -1,2377 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# - from __future__ import annotations - - from typing import MutableMapping, MutableSequence - - import proto # type: ignore - - from google.cloud.spanner_admin_instance_v1.types import common - from google.longrunning import operations_pb2 # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore - from google.protobuf import timestamp_pb2 # type: ignore - - - __protobuf__ = proto.module( - package='google.spanner.admin.instance.v1', - manifest={ - 'ReplicaInfo', - 'InstanceConfig', - 'ReplicaComputeCapacity', - 'AutoscalingConfig', - 'Instance', - 'ListInstanceConfigsRequest', - 'ListInstanceConfigsResponse', - 'GetInstanceConfigRequest', - 'CreateInstanceConfigRequest', - 'UpdateInstanceConfigRequest', - 'DeleteInstanceConfigRequest', - 'ListInstanceConfigOperationsRequest', - 'ListInstanceConfigOperationsResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'CreateInstanceMetadata', - 'UpdateInstanceMetadata', - 'FreeInstanceMetadata', - 'CreateInstanceConfigMetadata', - 'UpdateInstanceConfigMetadata', - 'InstancePartition', - 'CreateInstancePartitionMetadata', - 'CreateInstancePartitionRequest', - 'DeleteInstancePartitionRequest', - 'GetInstancePartitionRequest', - 'UpdateInstancePartitionRequest', - 'UpdateInstancePartitionMetadata', - 'ListInstancePartitionsRequest', - 'ListInstancePartitionsResponse', - 'ListInstancePartitionOperationsRequest', - 'ListInstancePartitionOperationsResponse', - 'MoveInstanceRequest', - 'MoveInstanceResponse', - 'MoveInstanceMetadata', - }, - ) - - - class ReplicaInfo(proto.Message): - r""" - - Attributes: - location (str): - The location of the serving resources, e.g., - "us-central1". - type_ (google.cloud.spanner_admin_instance_v1.types.ReplicaInfo.ReplicaType): - The type of replica. - default_leader_location (bool): - If true, this location is designated as the default leader - location where leader replicas are placed. See the `region - types documentation - <https://cloud.google.com/spanner/docs/instances#region_configs>`__ - for more details. - """ - class ReplicaType(proto.Enum): - r"""Indicates the type of replica. See the `replica types - documentation <https://cloud.google.com/spanner/docs/replication#replica_types>`__ - for more details. - - Values: - TYPE_UNSPECIFIED (0): - Not specified. - READ_WRITE (1): - Read-write replicas support both reads and writes. These - replicas: - - - Maintain a full copy of your data. - - Serve reads. - - Can vote whether to commit a write. - - Participate in leadership election. - - Are eligible to become a leader. - READ_ONLY (2): - Read-only replicas only support reads (not writes). - Read-only replicas: - - - Maintain a full copy of your data. - - Serve reads. - - Do not participate in voting to commit writes. - - Are not eligible to become a leader. - WITNESS (3): - Witness replicas don't support reads but do participate in - voting to commit writes. Witness replicas: - - - Do not maintain a full copy of data. - - Do not serve reads. - - Vote whether to commit writes. - - Participate in leader election but are not eligible to - become leader. - """ - TYPE_UNSPECIFIED = 0 - READ_WRITE = 1 - READ_ONLY = 2 - WITNESS = 3 - - location: str = proto.Field( - proto.STRING, - number=1, - ) - type_: ReplicaType = proto.Field( - proto.ENUM, - number=2, - enum=ReplicaType, - ) - default_leader_location: bool = proto.Field( - proto.BOOL, - number=3, - ) - - - class InstanceConfig(proto.Message): - r"""A possible configuration for a Cloud Spanner instance.
-    Configurations define the geographic placement of nodes and
-    their replication.
-
-    Attributes:
-        name (str):
-            A unique identifier for the instance configuration. Values
-            are of the form
-            ``projects//instanceConfigs/[a-z][-a-z0-9]*``.
-
-            User instance configurations must start with ``custom-``.
-        display_name (str):
-            The name of this instance configuration as it
-            appears in UIs.
-        config_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.Type):
-            Output only. Whether this instance
-            configuration is a Google-managed or
-            user-managed configuration.
-        replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]):
-            The geographic placement of nodes in this instance
-            configuration and their replication properties.
-
-            To create user-managed configurations, input ``replicas``
-            must include all replicas in ``replicas`` of the
-            ``base_config`` and include one or more replicas in the
-            ``optional_replicas`` of the ``base_config``.
-        optional_replicas (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaInfo]):
-            Output only. The available optional replicas
-            to choose from for user-managed configurations.
-            Populated for Google-managed configurations.
-        base_config (str):
-            Base configuration name, e.g.
-            projects//instanceConfigs/nam3, based on which
-            this configuration is created. Only set for user-managed
-            configurations. ``base_config`` must refer to a
-            configuration of type ``GOOGLE_MANAGED`` in the same project
-            as this configuration.
-        labels (MutableMapping[str, str]):
-            Cloud Labels are a flexible and lightweight mechanism for
-            organizing cloud resources into groups that reflect a
-            customer's organizational needs and deployment strategies.
-            Cloud Labels can be used to filter collections of resources.
-            They can be used to control how resource metrics are
-            aggregated. And they can be used as arguments to policy
-            management rules (e.g. route, firewall, load balancing,
-            etc.).
-
-            -  Label keys must be between 1 and 63 characters long and
-               must conform to the following regular expression:
-               ``[a-z][a-z0-9_-]{0,62}``.
-            -  Label values must be between 0 and 63 characters long and
-               must conform to the regular expression
-               ``[a-z0-9_-]{0,63}``.
-            -  No more than 64 labels can be associated with a given
-               resource.
-
-            See https://goo.gl/xmQnxf for more information on and
-            examples of labels.
-
-            If you plan to use labels in your own code, please note that
-            additional characters may be allowed in the future.
-            Therefore, you are advised to use an internal label
-            representation, such as JSON, which doesn't rely upon
-            specific characters being disallowed. For example,
-            representing labels as the string: name + "*" + value would
-            prove problematic if we were to allow "*" in a future
-            release.
-        etag (str):
-            etag is used for optimistic concurrency
-            control as a way to help prevent simultaneous
-            updates of an instance configuration from
-            overwriting each other. It is strongly suggested
-            that systems make use of the etag in the
-            read-modify-write cycle to perform instance
-            configuration updates in order to avoid race
-            conditions: An etag is returned in the response
-            which contains instance configurations, and
-            systems are expected to put that etag in the
-            request to update the instance configuration to
-            ensure that their change is applied to the same
-            version of the instance configuration.
If no - etag is provided in the call to update the - instance configuration, then the existing - instance configuration is overwritten blindly. - leader_options (MutableSequence[str]): - Allowed values of the "default_leader" schema option for - databases in instances that use this instance configuration. - reconciling (bool): - Output only. If true, the instance - configuration is being created or updated. If - false, there are no ongoing operations for the - instance configuration. - state (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.State): - Output only. The current instance configuration state. - Applicable only for ``USER_MANAGED`` configurations. - free_instance_availability (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.FreeInstanceAvailability): - Output only. Describes whether free instances - are available to be created in this instance - configuration. - quorum_type (google.cloud.spanner_admin_instance_v1.types.InstanceConfig.QuorumType): - Output only. The ``QuorumType`` of the instance - configuration. - storage_limit_per_processing_unit (int): - Output only. The storage limit in bytes per - processing unit. - """ - class Type(proto.Enum): - r"""The type of this configuration. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified. - GOOGLE_MANAGED (1): - Google-managed configuration. - USER_MANAGED (2): - User-managed configuration. - """ - TYPE_UNSPECIFIED = 0 - GOOGLE_MANAGED = 1 - USER_MANAGED = 2 - - class State(proto.Enum): - r"""Indicates the current state of the instance configuration. - - Values: - STATE_UNSPECIFIED (0): - Not specified. - CREATING (1): - The instance configuration is still being - created. - READY (2): - The instance configuration is fully created - and ready to be used to create instances. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - class FreeInstanceAvailability(proto.Enum): - r"""Describes the availability for free instances to be created - in an instance configuration. - - Values: - FREE_INSTANCE_AVAILABILITY_UNSPECIFIED (0): - Not specified. - AVAILABLE (1): - Indicates that free instances are available - to be created in this instance configuration. - UNSUPPORTED (2): - Indicates that free instances are not - supported in this instance configuration. - DISABLED (3): - Indicates that free instances are currently - not available to be created in this instance - configuration. - QUOTA_EXCEEDED (4): - Indicates that additional free instances - cannot be created in this instance configuration - because the project has reached its limit of - free instances. - """ - FREE_INSTANCE_AVAILABILITY_UNSPECIFIED = 0 - AVAILABLE = 1 - UNSUPPORTED = 2 - DISABLED = 3 - QUOTA_EXCEEDED = 4 - - class QuorumType(proto.Enum): - r"""Indicates the quorum type of this instance configuration. - - Values: - QUORUM_TYPE_UNSPECIFIED (0): - Quorum type not specified. - REGION (1): - An instance configuration tagged with ``REGION`` quorum type - forms a write quorum in a single region. - DUAL_REGION (2): - An instance configuration tagged with the ``DUAL_REGION`` - quorum type forms a write quorum with exactly two read-write - regions in a multi-region configuration. - - This instance configuration requires failover in the event - of regional failures. - MULTI_REGION (3): - An instance configuration tagged with the ``MULTI_REGION`` - quorum type forms a write quorum from replicas that are - spread across more than one region in a multi-region - configuration. 
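As a point of reference, here is a minimal sketch of how the ``InstanceConfig``, ``ReplicaInfo``, and quorum-type fields above surface through the generated client; it assumes default application credentials, and ``my-project`` is a placeholder project ID.

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    # Iterate every configuration the project can use; the pager fetches
    # additional pages transparently.
    for config in client.list_instance_configs(parent="projects/my-project"):
        print(config.name, config.config_type, config.quorum_type)
        for replica in config.replicas:
            # ``type`` is reserved in proto-plus, hence the trailing underscore.
            print("  ", replica.location, replica.type_, replica.default_leader_location)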
- """ - QUORUM_TYPE_UNSPECIFIED = 0 - REGION = 1 - DUAL_REGION = 2 - MULTI_REGION = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - config_type: Type = proto.Field( - proto.ENUM, - number=5, - enum=Type, - ) - replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ReplicaInfo', - ) - optional_replicas: MutableSequence['ReplicaInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='ReplicaInfo', - ) - base_config: str = proto.Field( - proto.STRING, - number=7, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - etag: str = proto.Field( - proto.STRING, - number=9, - ) - leader_options: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - reconciling: bool = proto.Field( - proto.BOOL, - number=10, - ) - state: State = proto.Field( - proto.ENUM, - number=11, - enum=State, - ) - free_instance_availability: FreeInstanceAvailability = proto.Field( - proto.ENUM, - number=12, - enum=FreeInstanceAvailability, - ) - quorum_type: QuorumType = proto.Field( - proto.ENUM, - number=18, - enum=QuorumType, - ) - storage_limit_per_processing_unit: int = proto.Field( - proto.INT64, - number=19, - ) - - -class ReplicaComputeCapacity(proto.Message): - r"""ReplicaComputeCapacity describes the amount of server - resources that are allocated to each replica identified by the - replica selection. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection): - Required. Identifies replicas by specified - properties. All replicas in the selection have - the same amount of compute capacity. - node_count (int): - The number of nodes allocated to each replica. - - This may be zero in API responses for instances that are not - yet in state ``READY``. - - This field is a member of `oneof`_ ``compute_capacity``. - processing_units (int): - The number of processing units allocated to each replica. - - This may be zero in API responses for instances that are not - yet in state ``READY``. - - This field is a member of `oneof`_ ``compute_capacity``. - """ - - replica_selection: common.ReplicaSelection = proto.Field( - proto.MESSAGE, - number=1, - message=common.ReplicaSelection, - ) - node_count: int = proto.Field( - proto.INT32, - number=2, - oneof='compute_capacity', - ) - processing_units: int = proto.Field( - proto.INT32, - number=3, - oneof='compute_capacity', - ) - - -class AutoscalingConfig(proto.Message): - r"""Autoscaling configuration for an instance. - - Attributes: - autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): - Required. Autoscaling limits for an instance. - autoscaling_targets (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingTargets): - Required. The autoscaling targets for an - instance. - asymmetric_autoscaling_options (MutableSequence[google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption]): - Optional. Optional asymmetric autoscaling - options. 
Replicas matching the replica selection
-            criteria will be autoscaled independently from
-            other replicas. The autoscaler will scale the
-            replicas based on the utilization of replicas
-            identified by the replica selection. Replica
-            selections should not overlap with each other.
-
-            Other replicas (those that do not match any replica
-            selection) will be autoscaled together and will
-            have the same compute capacity allocated to
-            them.
-    """
-
-    class AutoscalingLimits(proto.Message):
-        r"""The autoscaling limits for the instance. Users can define the
-        minimum and maximum compute capacity allocated to the instance, and
-        the autoscaler will only scale within that range. Users can either
-        use nodes or processing units to specify the limits, but should use
-        the same unit to set both the min_limit and max_limit.
-
-        This message has `oneof`_ fields (mutually exclusive fields).
-        For each oneof, at most one member field can be set at the same time.
-        Setting any member of the oneof automatically clears all other
-        members.
-
-        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-        Attributes:
-            min_nodes (int):
-                Minimum number of nodes allocated to the
-                instance. If set, this number should be greater
-                than or equal to 1.
-
-                This field is a member of `oneof`_ ``min_limit``.
-            min_processing_units (int):
-                Minimum number of processing units allocated
-                to the instance. If set, this number should be
-                multiples of 1000.
-
-                This field is a member of `oneof`_ ``min_limit``.
-            max_nodes (int):
-                Maximum number of nodes allocated to the instance. If set,
-                this number should be greater than or equal to min_nodes.
-
-                This field is a member of `oneof`_ ``max_limit``.
-            max_processing_units (int):
-                Maximum number of processing units allocated to the
-                instance. If set, this number should be multiples of 1000
-                and be greater than or equal to min_processing_units.
-
-                This field is a member of `oneof`_ ``max_limit``.
-        """
-
-        min_nodes: int = proto.Field(
-            proto.INT32,
-            number=1,
-            oneof='min_limit',
-        )
-        min_processing_units: int = proto.Field(
-            proto.INT32,
-            number=2,
-            oneof='min_limit',
-        )
-        max_nodes: int = proto.Field(
-            proto.INT32,
-            number=3,
-            oneof='max_limit',
-        )
-        max_processing_units: int = proto.Field(
-            proto.INT32,
-            number=4,
-            oneof='max_limit',
-        )
-
-    class AutoscalingTargets(proto.Message):
-        r"""The autoscaling targets for an instance.
-
-        Attributes:
-            high_priority_cpu_utilization_percent (int):
-                Required. The target high priority cpu utilization
-                percentage that the autoscaler should be trying to achieve
-                for the instance. This number is on a scale from 0 (no
-                utilization) to 100 (full utilization). The valid range is
-                [10, 90] inclusive.
-            storage_utilization_percent (int):
-                Required. The target storage utilization percentage that the
-                autoscaler should be trying to achieve for the instance.
-                This number is on a scale from 0 (no utilization) to 100
-                (full utilization). The valid range is [10, 99] inclusive.
-        """
-
-        high_priority_cpu_utilization_percent: int = proto.Field(
-            proto.INT32,
-            number=1,
-        )
-        storage_utilization_percent: int = proto.Field(
-            proto.INT32,
-            number=2,
-        )
-
-    class AsymmetricAutoscalingOption(proto.Message):
-        r"""AsymmetricAutoscalingOption specifies the scaling of replicas
-        identified by the given selection.
-
-        Attributes:
-            replica_selection (google.cloud.spanner_admin_instance_v1.types.ReplicaSelection):
-                Required.
Selects the replicas to which this - AsymmetricAutoscalingOption applies. Only - read-only replicas are supported. - overrides (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides): - Optional. Overrides applied to the top-level - autoscaling configuration for the selected - replicas. - """ - - class AutoscalingConfigOverrides(proto.Message): - r"""Overrides the top-level autoscaling configuration for the replicas - identified by ``replica_selection``. All fields in this message are - optional. Any unspecified fields will use the corresponding values - from the top-level autoscaling configuration. - - Attributes: - autoscaling_limits (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig.AutoscalingLimits): - Optional. If specified, overrides the min/max - limit in the top-level autoscaling configuration - for the selected replicas. - autoscaling_target_high_priority_cpu_utilization_percent (int): - Optional. If specified, overrides the autoscaling target - high_priority_cpu_utilization_percent in the top-level - autoscaling configuration for the selected replicas. - """ - - autoscaling_limits: 'AutoscalingConfig.AutoscalingLimits' = proto.Field( - proto.MESSAGE, - number=1, - message='AutoscalingConfig.AutoscalingLimits', - ) - autoscaling_target_high_priority_cpu_utilization_percent: int = proto.Field( - proto.INT32, - number=2, - ) - - replica_selection: common.ReplicaSelection = proto.Field( - proto.MESSAGE, - number=1, - message=common.ReplicaSelection, - ) - overrides: 'AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides' = proto.Field( - proto.MESSAGE, - number=2, - message='AutoscalingConfig.AsymmetricAutoscalingOption.AutoscalingConfigOverrides', - ) - - autoscaling_limits: AutoscalingLimits = proto.Field( - proto.MESSAGE, - number=1, - message=AutoscalingLimits, - ) - autoscaling_targets: AutoscalingTargets = proto.Field( - proto.MESSAGE, - number=2, - message=AutoscalingTargets, - ) - asymmetric_autoscaling_options: MutableSequence[AsymmetricAutoscalingOption] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=AsymmetricAutoscalingOption, - ) - - -class Instance(proto.Message): - r"""An isolated set of Cloud Spanner resources on which databases - can be hosted. - - Attributes: - name (str): - Required. A unique identifier for the instance, which cannot - be changed after the instance is created. Values are of the - form - ``projects//instances/[a-z][-a-z0-9]*[a-z0-9]``. - The final segment of the name must be between 2 and 64 - characters in length. - config (str): - Required. The name of the instance's configuration. Values - are of the form - ``projects//instanceConfigs/``. See - also - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - and - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - display_name (str): - Required. The descriptive name for this - instance as it appears in UIs. Must be unique - per project and between 4 and 30 characters in - length. - node_count (int): - The number of nodes allocated to this instance. At most, one - of either ``node_count`` or ``processing_units`` should be - present in the message. - - Users can set the ``node_count`` field to specify the target - number of nodes allocated to the instance. - - If autoscaling is enabled, ``node_count`` is treated as an - ``OUTPUT_ONLY`` field and reflects the current number of - nodes allocated to the instance. 
- - This might be zero in API responses for instances that are - not yet in the ``READY`` state. - - For more information, see `Compute capacity, nodes, and - processing - units `__. - processing_units (int): - The number of processing units allocated to this instance. - At most, one of either ``processing_units`` or - ``node_count`` should be present in the message. - - Users can set the ``processing_units`` field to specify the - target number of processing units allocated to the instance. - - If autoscaling is enabled, ``processing_units`` is treated - as an ``OUTPUT_ONLY`` field and reflects the current number - of processing units allocated to the instance. - - This might be zero in API responses for instances that are - not yet in the ``READY`` state. - - For more information, see `Compute capacity, nodes and - processing - units `__. - replica_compute_capacity (MutableSequence[google.cloud.spanner_admin_instance_v1.types.ReplicaComputeCapacity]): - Output only. Lists the compute capacity per - ReplicaSelection. A replica selection identifies - a set of replicas with common properties. - Replicas identified by a ReplicaSelection are - scaled with the same compute capacity. - autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig): - Optional. The autoscaling configuration. Autoscaling is - enabled if this field is set. When autoscaling is enabled, - node_count and processing_units are treated as OUTPUT_ONLY - fields and reflect the current compute capacity allocated to - the instance. - state (google.cloud.spanner_admin_instance_v1.types.Instance.State): - Output only. The current instance state. For - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance], - the state must be either omitted or set to ``CREATING``. For - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance], - the state must be either omitted or set to ``READY``. - labels (MutableMapping[str, str]): - Cloud Labels are a flexible and lightweight mechanism for - organizing cloud resources into groups that reflect a - customer's organizational needs and deployment strategies. - Cloud Labels can be used to filter collections of resources. - They can be used to control how resource metrics are - aggregated. And they can be used as arguments to policy - management rules (e.g. route, firewall, load balancing, - etc.). - - - Label keys must be between 1 and 63 characters long and - must conform to the following regular expression: - ``[a-z][a-z0-9_-]{0,62}``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``[a-z0-9_-]{0,63}``. - - No more than 64 labels can be associated with a given - resource. - - See https://goo.gl/xmQnxf for more information on and - examples of labels. - - If you plan to use labels in your own code, please note that - additional characters may be allowed in the future. And so - you are advised to use an internal label representation, - such as JSON, which doesn't rely upon specific characters - being disallowed. For example, representing labels as the - string: name + "*" + value would prove problematic if we - were to allow "*" in a future release. - instance_type (google.cloud.spanner_admin_instance_v1.types.Instance.InstanceType): - The ``InstanceType`` of the current instance. - endpoint_uris (MutableSequence[str]): - Deprecated. This field is not populated. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The time at which the instance
-            was created.
-        update_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time at which the instance
-            was most recently updated.
-        free_instance_metadata (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata):
-            Free instance metadata. Only populated for
-            free instances.
-        edition (google.cloud.spanner_admin_instance_v1.types.Instance.Edition):
-            Optional. The ``Edition`` of the current instance.
-        default_backup_schedule_type (google.cloud.spanner_admin_instance_v1.types.Instance.DefaultBackupScheduleType):
-            Optional. Controls the default backup schedule behavior for
-            new databases within the instance. By default, a backup
-            schedule is created automatically when a new database is
-            created in a new instance.
-
-            Note that the ``AUTOMATIC`` value isn't permitted for free
-            instances, as backups and backup schedules aren't supported
-            for free instances.
-
-            In the ``GetInstance`` or ``ListInstances`` response, if the
-            value of ``default_backup_schedule_type`` isn't set, or set
-            to ``NONE``, Spanner doesn't create a default backup
-            schedule for new databases in the instance.
-    """
-    class State(proto.Enum):
-        r"""Indicates the current state of the instance.
-
-        Values:
-            STATE_UNSPECIFIED (0):
-                Not specified.
-            CREATING (1):
-                The instance is still being created.
-                Resources may not be available yet, and
-                operations such as database creation may not
-                work.
-            READY (2):
-                The instance is fully created and ready to do
-                work such as creating databases.
-        """
-        STATE_UNSPECIFIED = 0
-        CREATING = 1
-        READY = 2
-
-    class InstanceType(proto.Enum):
-        r"""The type of this instance. The type can be used to distinguish
-        product variants that can affect aspects like usage restrictions,
-        quotas, and billing. Currently, this is used to distinguish
-        FREE_INSTANCE vs PROVISIONED instances.
-
-        Values:
-            INSTANCE_TYPE_UNSPECIFIED (0):
-                Not specified.
-            PROVISIONED (1):
-                Provisioned instances have dedicated
-                resources, standard usage limits, and support.
-            FREE_INSTANCE (2):
-                Free instances provide no guarantee of dedicated resources;
-                [node_count, processing_units] should be 0. They come with
-                stricter usage limits and limited support.
-        """
-        INSTANCE_TYPE_UNSPECIFIED = 0
-        PROVISIONED = 1
-        FREE_INSTANCE = 2
-
-    class Edition(proto.Enum):
-        r"""The edition selected for this instance. Different editions
-        provide different capabilities at different price points.
-
-        Values:
-            EDITION_UNSPECIFIED (0):
-                Edition not specified.
-            STANDARD (1):
-                Standard edition.
-            ENTERPRISE (2):
-                Enterprise edition.
-            ENTERPRISE_PLUS (3):
-                Enterprise Plus edition.
-        """
-        EDITION_UNSPECIFIED = 0
-        STANDARD = 1
-        ENTERPRISE = 2
-        ENTERPRISE_PLUS = 3
-
-    class DefaultBackupScheduleType(proto.Enum):
-        r"""Indicates the `default backup
-        schedule `__
-        behavior for new databases within the instance.
-
-        Values:
-            DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED (0):
-                Not specified.
-            NONE (1):
-                A default backup schedule isn't created
-                automatically when a new database is created in
-                the instance.
-            AUTOMATIC (2):
-                A default backup schedule is created
-                automatically when a new database is created in
-                the instance. The default backup schedule
-                creates a full backup every 24 hours. These full
-                backups are retained for 7 days. You can edit or
-                delete the default backup schedule once it's
-                created.
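For orientation, a hedged sketch of how ``Instance`` and its enums are typically combined in a ``CreateInstance`` call; the resource names are placeholders, and the call returns a long-running operation.

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    instance = spanner_admin_instance_v1.Instance(
        config="projects/my-project/instanceConfigs/regional-us-central1",
        display_name="My instance",
        processing_units=1000,  # alternatively set node_count, not both
        edition=spanner_admin_instance_v1.Instance.Edition.STANDARD,
    )

    operation = client.create_instance(
        parent="projects/my-project",
        instance_id="my-instance",
        instance=instance,
    )
    created = operation.result()  # waits for the CreateInstance LRO
    print(created.state)          # expected to be State.READY on success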
- """ - DEFAULT_BACKUP_SCHEDULE_TYPE_UNSPECIFIED = 0 - NONE = 1 - AUTOMATIC = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: str = proto.Field( - proto.STRING, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - node_count: int = proto.Field( - proto.INT32, - number=5, - ) - processing_units: int = proto.Field( - proto.INT32, - number=9, - ) - replica_compute_capacity: MutableSequence['ReplicaComputeCapacity'] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message='ReplicaComputeCapacity', - ) - autoscaling_config: 'AutoscalingConfig' = proto.Field( - proto.MESSAGE, - number=17, - message='AutoscalingConfig', - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - instance_type: InstanceType = proto.Field( - proto.ENUM, - number=10, - enum=InstanceType, - ) - endpoint_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - free_instance_metadata: 'FreeInstanceMetadata' = proto.Field( - proto.MESSAGE, - number=13, - message='FreeInstanceMetadata', - ) - edition: Edition = proto.Field( - proto.ENUM, - number=20, - enum=Edition, - ) - default_backup_schedule_type: DefaultBackupScheduleType = proto.Field( - proto.ENUM, - number=23, - enum=DefaultBackupScheduleType, - ) - - -class ListInstanceConfigsRequest(proto.Message): - r"""The request for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Attributes: - parent (str): - Required. The name of the project for which a list of - supported instance configurations is requested. Values are - of the form ``projects/``. - page_size (int): - Number of instance configurations to be - returned in the response. If 0 or less, defaults - to the server's maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigsResponse.next_page_token] - from a previous - [ListInstanceConfigsResponse][google.spanner.admin.instance.v1.ListInstanceConfigsResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInstanceConfigsResponse(proto.Message): - r"""The response for - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - - Attributes: - instance_configs (MutableSequence[google.cloud.spanner_admin_instance_v1.types.InstanceConfig]): - The list of requested instance - configurations. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs] - call to fetch more of the matching instance configurations. 
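The ``page_size``/``next_page_token`` pair drives pagination; a small sketch, assuming the client from the earlier examples, of walking raw pages rather than flattened items:

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    pager = client.list_instance_configs(
        request={"parent": "projects/my-project", "page_size": 50}
    )
    for page in pager.pages:
        # Each page is a ListInstanceConfigsResponse; an empty token means
        # there are no further pages to fetch.
        print(len(page.instance_configs), "configs;",
              "next token:", page.next_page_token or "<end>")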
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    instance_configs: MutableSequence['InstanceConfig'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='InstanceConfig',
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class GetInstanceConfigRequest(proto.Message):
-    r"""The request for
-    [GetInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig].
-
-    Attributes:
-        name (str):
-            Required. The name of the requested instance configuration.
-            Values are of the form
-            ``projects//instanceConfigs/``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class CreateInstanceConfigRequest(proto.Message):
-    r"""The request for
-    [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig].
-
-    Attributes:
-        parent (str):
-            Required. The name of the project in which to create the
-            instance configuration. Values are of the form
-            ``projects/``.
-        instance_config_id (str):
-            Required. The ID of the instance configuration to create.
-            Valid identifiers are of the form
-            ``custom-[-a-z0-9]*[a-z0-9]`` and must be between 2 and 64
-            characters in length. The ``custom-`` prefix is required to
-            avoid name conflicts with Google-managed configurations.
-        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-            Required. The ``InstanceConfig`` proto of the configuration
-            to create. ``instance_config.name`` must be
-            ``/instanceConfigs/``.
-            ``instance_config.base_config`` must be a Google-managed
-            configuration name, e.g. /instanceConfigs/us-east1,
-            /instanceConfigs/nam3.
-        validate_only (bool):
-            An option to validate, but not actually
-            execute, a request, and provide the same
-            response.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    instance_config_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    instance_config: 'InstanceConfig' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='InstanceConfig',
-    )
-    validate_only: bool = proto.Field(
-        proto.BOOL,
-        number=4,
-    )
-
-
-class UpdateInstanceConfigRequest(proto.Message):
-    r"""The request for
-    [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig].
-
-    Attributes:
-        instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig):
-            Required. The user instance configuration to update, which
-            must always include the instance configuration name.
-            Otherwise, only fields mentioned in
-            [update_mask][google.spanner.admin.instance.v1.UpdateInstanceConfigRequest.update_mask]
-            need be included. To prevent conflicts of concurrent
-            updates,
-            [etag][google.spanner.admin.instance.v1.InstanceConfig.etag]
-            can be used.
-        update_mask (google.protobuf.field_mask_pb2.FieldMask):
-            Required. A mask specifying which fields in
-            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
-            should be updated. The field mask must always be specified;
-            this prevents any future fields in
-            [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig]
-            from being erased accidentally by clients that do not know
-            about them. Only display_name and labels can be updated.
-        validate_only (bool):
-            An option to validate, but not actually
-            execute, a request, and provide the same
-            response.
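A sketch of a masked update with a dry run; it assumes a user-managed configuration already exists, and the resource name is a placeholder.

    from google.cloud import spanner_admin_instance_v1
    from google.protobuf import field_mask_pb2

    client = spanner_admin_instance_v1.InstanceAdminClient()

    config = spanner_admin_instance_v1.InstanceConfig(
        name="projects/my-project/instanceConfigs/custom-config",
        display_name="Renamed custom configuration",
    )

    # Only fields named in the mask are touched; validate_only asks the
    # server to check the request without applying it.
    operation = client.update_instance_config(
        request={
            "instance_config": config,
            "update_mask": field_mask_pb2.FieldMask(paths=["display_name"]),
            "validate_only": True,
        }
    )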
- """ - - instance_config: 'InstanceConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='InstanceConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class DeleteInstanceConfigRequest(proto.Message): - r"""The request for - [DeleteInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig]. - - Attributes: - name (str): - Required. The name of the instance configuration to be - deleted. Values are of the form - ``projects//instanceConfigs/`` - etag (str): - Used for optimistic concurrency control as a - way to help prevent simultaneous deletes of an - instance configuration from overwriting each - other. If not empty, the API - only deletes the instance configuration when the - etag provided matches the current status of the - requested instance configuration. Otherwise, - deletes the instance configuration without - checking the current status of the requested - instance configuration. - validate_only (bool): - An option to validate, but not actually - execute, a request, and provide the same - response. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class ListInstanceConfigOperationsRequest(proto.Message): - r"""The request for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - Attributes: - parent (str): - Required. The project of the instance configuration - operations. Values are of the form ``projects/``. - filter (str): - An expression that filters the list of returned operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the Operation are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata] - is - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first, if filtering - on metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic. However, you can specify AND, OR, - and NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. 
- - ``(metadata.@type=`` - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstanceConfigMetadata) AND`` - ``(metadata.instance_config.name:custom-config) AND`` - ``(metadata.progress.start_time < \"2021-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Return operations where: - - - The operation's metadata type is - [CreateInstanceConfigMetadata][google.spanner.admin.instance.v1.CreateInstanceConfigMetadata]. - - The instance configuration name contains - "custom-config". - - The operation started before 2021-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Number of operations to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse.next_page_token] - from a previous - [ListInstanceConfigOperationsResponse][google.spanner.admin.instance.v1.ListInstanceConfigOperationsResponse] - to the same ``parent`` and with the same ``filter``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListInstanceConfigOperationsResponse(proto.Message): - r"""The response for - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations]. - - Attributes: - operations (MutableSequence[google.longrunning.operations_pb2.Operation]): - The list of matching instance configuration long-running - operations. Each operation's name will be prefixed by the - name of the instance configuration. The operation's metadata - field type ``metadata.type_url`` describes the type of the - metadata. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstanceConfigOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations] - call to fetch more of the matching metadata. - """ - - @property - def raw_page(self): - return self - - operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=operations_pb2.Operation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetInstanceRequest(proto.Message): - r"""The request for - [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. - - Attributes: - name (str): - Required. The name of the requested instance. Values are of - the form ``projects//instances/``. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - If field_mask is present, specifies the subset of - [Instance][google.spanner.admin.instance.v1.Instance] fields - that should be returned. If absent, all - [Instance][google.spanner.admin.instance.v1.Instance] fields - are returned. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class CreateInstanceRequest(proto.Message): - r"""The request for - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - - Attributes: - parent (str): - Required. The name of the project in which to create the - instance. Values are of the form ``projects/``. - instance_id (str): - Required. The ID of the instance to create. 
Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and - must be between 2 and 64 characters in length. - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to create. The name may be omitted, - but if specified must be - ``/instances/``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - instance_id: str = proto.Field( - proto.STRING, - number=2, - ) - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=3, - message='Instance', - ) - - -class ListInstancesRequest(proto.Message): - r"""The request for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Attributes: - parent (str): - Required. The name of the project for which a list of - instances is requested. Values are of the form - ``projects/``. - page_size (int): - Number of instances to be returned in the - response. If 0 or less, defaults to the server's - maximum allowed page size. - page_token (str): - If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstancesResponse.next_page_token] - from a previous - [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse]. - filter (str): - An expression for filtering the results of the request. - Filter rules are case insensitive. The fields eligible for - filtering are: - - - ``name`` - - ``display_name`` - - ``labels.key`` where key is the name of a label - - Some examples of using filters are: - - - ``name:*`` --> The instance has a name. - - ``name:Howl`` --> The instance's name contains the string - "howl". - - ``name:HOWL`` --> Equivalent to above. - - ``NAME:howl`` --> Equivalent to above. - - ``labels.env:*`` --> The instance has the label "env". - - ``labels.env:dev`` --> The instance has the label "env" - and the value of the label contains the string "dev". - - ``name:howl labels.env:dev`` --> The instance's name - contains "howl" and it has the label "env" with its value - containing "dev". - instance_deadline (google.protobuf.timestamp_pb2.Timestamp): - Deadline used while retrieving metadata for instances. - Instances whose metadata cannot be retrieved within this - deadline will be added to - [unreachable][google.spanner.admin.instance.v1.ListInstancesResponse.unreachable] - in - [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse]. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - instance_deadline: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class ListInstancesResponse(proto.Message): - r"""The response for - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. - - Attributes: - instances (MutableSequence[google.cloud.spanner_admin_instance_v1.types.Instance]): - The list of requested instances. - next_page_token (str): - ``next_page_token`` can be sent in a subsequent - [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances] - call to fetch more of the matching instances. - unreachable (MutableSequence[str]): - The list of unreachable instances. It includes the names of - instances whose metadata could not be retrieved within - [instance_deadline][google.spanner.admin.instance.v1.ListInstancesRequest.instance_deadline]. 
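A short sketch of a filtered listing, assuming the same placeholder project; ``filter`` is not a flattened method argument, so the request is passed as a mapping.

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    pager = client.list_instances(
        request={
            "parent": "projects/my-project",
            "filter": "labels.env:dev",  # instances labeled env ~ "dev"
        }
    )
    for instance in pager:  # the pager fetches further pages transparently
        print(instance.name, instance.display_name)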
- """ - - @property - def raw_page(self): - return self - - instances: MutableSequence['Instance'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Instance', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateInstanceRequest(proto.Message): - r"""The request for - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. - - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - Required. The instance to update, which must always include - the instance name. Otherwise, only fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] - need be included. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [Instance][google.spanner.admin.instance.v1.Instance] should - be updated. The field mask must always be specified; this - prevents any future fields in - [Instance][google.spanner.admin.instance.v1.Instance] from - being erased accidentally by clients that do not know about - them. - """ - - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteInstanceRequest(proto.Message): - r"""The request for - [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. - - Attributes: - name (str): - Required. The name of the instance to be deleted. Values are - of the form ``projects//instances/`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateInstanceMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. - - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - The instance being created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the - [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod): - The expected fulfillment period of this - create operation. - """ - - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expected_fulfillment_period: common.FulfillmentPeriod = proto.Field( - proto.ENUM, - number=5, - enum=common.FulfillmentPeriod, - ) - - -class UpdateInstanceMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. 
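These metadata messages are what a caller sees while polling the returned long-running operation; a sketch, assuming ``operation`` came from an earlier ``update_instance`` call:

    # Assumes ``operation`` was returned by client.update_instance(...).
    metadata = operation.metadata  # deserialized UpdateInstanceMetadata
    print("request received at:", metadata.start_time)
    print("expected fulfillment:", metadata.expected_fulfillment_period)

    instance = operation.result()  # blocks until the update completes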
- - Attributes: - instance (google.cloud.spanner_admin_instance_v1.types.Instance): - The desired end state of the update. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which - [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - expected_fulfillment_period (google.cloud.spanner_admin_instance_v1.types.FulfillmentPeriod): - The expected fulfillment period of this - update operation. - """ - - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expected_fulfillment_period: common.FulfillmentPeriod = proto.Field( - proto.ENUM, - number=5, - enum=common.FulfillmentPeriod, - ) - - -class FreeInstanceMetadata(proto.Message): - r"""Free instance specific metadata that is kept even after an - instance has been upgraded for tracking purposes. - - Attributes: - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp after which the - instance will either be upgraded or scheduled - for deletion after a grace period. - ExpireBehavior is used to choose between - upgrading or scheduling the free instance for - deletion. This timestamp is set during the - creation of a free instance. - upgrade_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. If present, the timestamp at - which the free instance was upgraded to a - provisioned instance. - expire_behavior (google.cloud.spanner_admin_instance_v1.types.FreeInstanceMetadata.ExpireBehavior): - Specifies the expiration behavior of a free instance. The - default of ExpireBehavior is ``REMOVE_AFTER_GRACE_PERIOD``. - This can be modified during or after creation, and before - expiration. - """ - class ExpireBehavior(proto.Enum): - r"""Allows users to change behavior when a free instance expires. - - Values: - EXPIRE_BEHAVIOR_UNSPECIFIED (0): - Not specified. - FREE_TO_PROVISIONED (1): - When the free instance expires, upgrade the - instance to a provisioned instance. - REMOVE_AFTER_GRACE_PERIOD (2): - When the free instance expires, disable the - instance, and delete it after the grace period - passes if it has not been upgraded. - """ - EXPIRE_BEHAVIOR_UNSPECIFIED = 0 - FREE_TO_PROVISIONED = 1 - REMOVE_AFTER_GRACE_PERIOD = 2 - - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - upgrade_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - expire_behavior: ExpireBehavior = proto.Field( - proto.ENUM, - number=3, - enum=ExpireBehavior, - ) - - -class CreateInstanceConfigMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig]. 
- - Attributes: - instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - The target instance configuration end state. - progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress): - The progress of the - [CreateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. - """ - - instance_config: 'InstanceConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='InstanceConfig', - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class UpdateInstanceConfigMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig]. - - Attributes: - instance_config (google.cloud.spanner_admin_instance_v1.types.InstanceConfig): - The desired instance configuration after - updating. - progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress): - The progress of the - [UpdateInstanceConfig][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig] - operation. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. - """ - - instance_config: 'InstanceConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='InstanceConfig', - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class InstancePartition(proto.Message): - r"""An isolated set of Cloud Spanner resources that databases can - define placements on. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Required. A unique identifier for the instance partition. - Values are of the form - ``projects//instances//instancePartitions/[a-z][-a-z0-9]*[a-z0-9]``. - The final segment of the name must be between 2 and 64 - characters in length. An instance partition's name cannot be - changed after the instance partition is created. - config (str): - Required. The name of the instance partition's - configuration. Values are of the form - ``projects//instanceConfigs/``. See - also - [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] - and - [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. - display_name (str): - Required. The descriptive name for this - instance partition as it appears in UIs. Must be - unique per project and between 4 and 30 - characters in length. - node_count (int): - The number of nodes allocated to this instance partition. - - Users can set the ``node_count`` field to specify the target - number of nodes allocated to the instance partition. - - This may be zero in API responses for instance partitions - that are not yet in state ``READY``. - - This field is a member of `oneof`_ ``compute_capacity``. 
-        processing_units (int):
-            The number of processing units allocated to this instance
-            partition.
-
-            Users can set the ``processing_units`` field to specify the
-            target number of processing units allocated to the instance
-            partition.
-
-            This might be zero in API responses for instance partitions
-            that are not yet in the ``READY`` state.
-
-            This field is a member of `oneof`_ ``compute_capacity``.
-        autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig):
-            Optional. The autoscaling configuration. Autoscaling is
-            enabled if this field is set. When autoscaling is enabled,
-            fields in compute_capacity are treated as OUTPUT_ONLY fields
-            and reflect the current compute capacity allocated to the
-            instance partition.
-        state (google.cloud.spanner_admin_instance_v1.types.InstancePartition.State):
-            Output only. The current instance partition
-            state.
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time at which the instance
-            partition was created.
-        update_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time at which the instance
-            partition was most recently updated.
-        referencing_databases (MutableSequence[str]):
-            Output only. The names of the databases that
-            reference this instance partition. Referencing
-            databases should share the parent instance. The
-            existence of any referencing database prevents
-            the instance partition from being deleted.
-        referencing_backups (MutableSequence[str]):
-            Output only. Deprecated: This field is not
-            populated. The names of the backups
-            that reference this instance partition.
-            Referencing backups should share the parent
-            instance. The existence of any referencing
-            backup prevents the instance partition from
-            being deleted.
-        etag (str):
-            Used for optimistic concurrency control as a
-            way to help prevent simultaneous updates of an
-            instance partition from overwriting each other.
-            It is strongly suggested that systems make use
-            of the etag in the read-modify-write cycle to
-            perform instance partition updates in order to
-            avoid race conditions: An etag is returned in
-            the response which contains instance partitions,
-            and systems are expected to put that etag in the
-            request to update instance partitions to ensure
-            that their change will be applied to the same
-            version of the instance partition. If no etag is
-            provided in the call to update the instance
-            partition, then the existing instance partition
-            is overwritten blindly.
-    """
-    class State(proto.Enum):
-        r"""Indicates the current state of the instance partition.
-
-        Values:
-            STATE_UNSPECIFIED (0):
-                Not specified.
-            CREATING (1):
-                The instance partition is still being
-                created. Resources may not be available yet, and
-                operations such as creating placements using
-                this instance partition may not work.
-            READY (2):
-                The instance partition is fully created and
-                ready to do work such as creating placements and
-                being used in databases.
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: str = proto.Field( - proto.STRING, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - node_count: int = proto.Field( - proto.INT32, - number=5, - oneof='compute_capacity', - ) - processing_units: int = proto.Field( - proto.INT32, - number=6, - oneof='compute_capacity', - ) - autoscaling_config: 'AutoscalingConfig' = proto.Field( - proto.MESSAGE, - number=13, - message='AutoscalingConfig', - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - referencing_databases: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=10, - ) - referencing_backups: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=11, - ) - etag: str = proto.Field( - proto.STRING, - number=12, - ) - - -class CreateInstancePartitionMetadata(proto.Message): - r"""Metadata type for the operation returned by - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - - Attributes: - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - The instance partition being created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. - end_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation failed or - was completed successfully. - """ - - instance_partition: 'InstancePartition' = proto.Field( - proto.MESSAGE, - number=1, - message='InstancePartition', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class CreateInstancePartitionRequest(proto.Message): - r"""The request for - [CreateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition]. - - Attributes: - parent (str): - Required. The name of the instance in which to create the - instance partition. Values are of the form - ``projects//instances/``. - instance_partition_id (str): - Required. The ID of the instance partition to create. Valid - identifiers are of the form ``[a-z][-a-z0-9]*[a-z0-9]`` and - must be between 2 and 64 characters in length. - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - Required. The instance partition to create. The - instance_partition.name may be omitted, but if specified - must be - ``/instancePartitions/``. 
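A hedged sketch of creating an instance partition with the request type above; all resource names are placeholders, and ``node_count``/``processing_units`` are mutually exclusive members of the ``compute_capacity`` oneof.

    from google.cloud import spanner_admin_instance_v1

    client = spanner_admin_instance_v1.InstanceAdminClient()

    partition = spanner_admin_instance_v1.InstancePartition(
        config="projects/my-project/instanceConfigs/regional-us-east1",
        display_name="My partition",
        node_count=1,  # or processing_units, but not both
    )

    operation = client.create_instance_partition(
        parent="projects/my-project/instances/my-instance",
        instance_partition_id="my-partition",
        instance_partition=partition,
    )
    partition = operation.result()  # waits for the CreateInstancePartition LRO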
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - instance_partition_id: str = proto.Field( - proto.STRING, - number=2, - ) - instance_partition: 'InstancePartition' = proto.Field( - proto.MESSAGE, - number=3, - message='InstancePartition', - ) - - -class DeleteInstancePartitionRequest(proto.Message): - r"""The request for - [DeleteInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition]. - - Attributes: - name (str): - Required. The name of the instance partition to be deleted. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}`` - etag (str): - Optional. If not empty, the API only deletes - the instance partition when the etag provided - matches the current status of the requested - instance partition. Otherwise, deletes the - instance partition without checking the current - status of the requested instance partition. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetInstancePartitionRequest(proto.Message): - r"""The request for - [GetInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition]. - - Attributes: - name (str): - Required. The name of the requested instance partition. - Values are of the form - ``projects/{project}/instances/{instance}/instancePartitions/{instance_partition}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateInstancePartitionRequest(proto.Message): - r"""The request for - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - - Attributes: - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - Required. The instance partition to update, which must - always include the instance partition name. Otherwise, only - fields mentioned in - [field_mask][google.spanner.admin.instance.v1.UpdateInstancePartitionRequest.field_mask] - need be included. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A mask specifying which fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - should be updated. The field mask must always be specified; - this prevents any future fields in - [InstancePartition][google.spanner.admin.instance.v1.InstancePartition] - from being erased accidentally by clients that do not know - about them. - """ - - instance_partition: 'InstancePartition' = proto.Field( - proto.MESSAGE, - number=1, - message='InstancePartition', - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class UpdateInstancePartitionMetadata(proto.Message): - r"""Metadata type for the operation returned by - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition]. - - Attributes: - instance_partition (google.cloud.spanner_admin_instance_v1.types.InstancePartition): - The desired end state of the update. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which - [UpdateInstancePartition][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition] - request was received. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. If set, this operation is in the - process of undoing itself (which is guaranteed - to succeed) and cannot be cancelled again. 
-        end_time (google.protobuf.timestamp_pb2.Timestamp):
-            The time at which this operation failed or
-            was completed successfully.
-    """
-
-    instance_partition: 'InstancePartition' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='InstancePartition',
-    )
-    start_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=timestamp_pb2.Timestamp,
-    )
-    cancel_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp_pb2.Timestamp,
-    )
-    end_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class ListInstancePartitionsRequest(proto.Message):
-    r"""The request for
-    [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
-
-    Attributes:
-        parent (str):
-            Required. The instance whose instance partitions should be
-            listed. Values are of the form
-            ``projects/<project>/instances/<instance>``. Use
-            ``{instance} = '-'`` to list instance partitions for all
-            instances in a project, e.g.,
-            ``projects/myproject/instances/-``.
-        page_size (int):
-            Number of instance partitions to be returned
-            in the response. If 0 or less, defaults to the
-            server's maximum allowed page size.
-        page_token (str):
-            If non-empty, ``page_token`` should contain a
-            [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.next_page_token]
-            from a previous
-            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
-        instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp):
-            Optional. Deadline used while retrieving metadata for
-            instance partitions. Instance partitions whose metadata
-            cannot be retrieved within this deadline will be added to
-            [unreachable][google.spanner.admin.instance.v1.ListInstancePartitionsResponse.unreachable]
-            in
-            [ListInstancePartitionsResponse][google.spanner.admin.instance.v1.ListInstancePartitionsResponse].
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class ListInstancePartitionsResponse(proto.Message):
-    r"""The response for
-    [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions].
-
-    Attributes:
-        instance_partitions (MutableSequence[google.cloud.spanner_admin_instance_v1.types.InstancePartition]):
-            The list of requested instance partitions.
-        next_page_token (str):
-            ``next_page_token`` can be sent in a subsequent
-            [ListInstancePartitions][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions]
-            call to fetch more of the matching instance partitions.
-        unreachable (MutableSequence[str]):
-            The list of unreachable instances or instance partitions.
-            It includes the names of instances or instance partitions
-            whose metadata could not be retrieved within
-            [instance_partition_deadline][google.spanner.admin.instance.v1.ListInstancePartitionsRequest.instance_partition_deadline].
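-
-    Example:
-        A sketch of listing partitions with the generated client; the
-        parent path is a placeholder, and the returned pager follows
-        ``next_page_token`` across pages transparently::
-
-            from google.cloud import spanner_admin_instance_v1
-
-            client = spanner_admin_instance_v1.InstanceAdminClient()
-            request = spanner_admin_instance_v1.ListInstancePartitionsRequest(
-                parent="projects/my-project/instances/my-instance",
-                page_size=100,
-            )
-            for partition in client.list_instance_partitions(request=request):
-                print(partition.name)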
- """ - - @property - def raw_page(self): - return self - - instance_partitions: MutableSequence['InstancePartition'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InstancePartition', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class ListInstancePartitionOperationsRequest(proto.Message): - r"""The request for - [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]. - - Attributes: - parent (str): - Required. The parent instance of the instance partition - operations. Values are of the form - ``projects//instances/``. - filter (str): - Optional. An expression that filters the list of returned - operations. - - A filter expression consists of a field name, a comparison - operator, and a value for filtering. The value must be a - string, a number, or a boolean. The comparison operator must - be one of: ``<``, ``>``, ``<=``, ``>=``, ``!=``, ``=``, or - ``:``. Colon ``:`` is the contains operator. Filter rules - are not case sensitive. - - The following fields in the Operation are eligible for - filtering: - - - ``name`` - The name of the long-running operation - - ``done`` - False if the operation is in progress, else - true. - - ``metadata.@type`` - the type of metadata. For example, - the type string for - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata] - is - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata``. - - ``metadata.`` - any field in metadata.value. - ``metadata.@type`` must be specified first, if filtering - on metadata fields. - - ``error`` - Error associated with the long-running - operation. - - ``response.@type`` - the type of response. - - ``response.`` - any field in response.value. - - You can combine multiple expressions by enclosing each - expression in parentheses. By default, expressions are - combined with AND logic. However, you can specify AND, OR, - and NOT logic explicitly. - - Here are a few examples: - - - ``done:true`` - The operation is complete. - - ``(metadata.@type=`` - ``type.googleapis.com/google.spanner.admin.instance.v1.CreateInstancePartitionMetadata) AND`` - ``(metadata.instance_partition.name:custom-instance-partition) AND`` - ``(metadata.start_time < \"2021-03-28T14:50:00Z\") AND`` - ``(error:*)`` - Return operations where: - - - The operation's metadata type is - [CreateInstancePartitionMetadata][google.spanner.admin.instance.v1.CreateInstancePartitionMetadata]. - - The instance partition name contains - "custom-instance-partition". - - The operation started before 2021-03-28T14:50:00Z. - - The operation resulted in an error. - page_size (int): - Optional. Number of operations to be returned - in the response. If 0 or less, defaults to the - server's maximum allowed page size. - page_token (str): - Optional. If non-empty, ``page_token`` should contain a - [next_page_token][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.next_page_token] - from a previous - [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse] - to the same ``parent`` and with the same ``filter``. - instance_partition_deadline (google.protobuf.timestamp_pb2.Timestamp): - Optional. Deadline used while retrieving metadata for - instance partition operations. 
-            Instance partitions whose operation metadata cannot be
-            retrieved within this deadline will be added to
-            [unreachable_instance_partitions][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse.unreachable_instance_partitions]
-            in
-            [ListInstancePartitionOperationsResponse][google.spanner.admin.instance.v1.ListInstancePartitionOperationsResponse].
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=3,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    instance_partition_deadline: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class ListInstancePartitionOperationsResponse(proto.Message):
-    r"""The response for
-    [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations].
-
-    Attributes:
-        operations (MutableSequence[google.longrunning.operations_pb2.Operation]):
-            The list of matching instance partition long-running
-            operations. Each operation's name will be prefixed by the
-            instance partition's name. The operation's metadata field
-            type ``metadata.type_url`` describes the type of the
-            metadata.
-        next_page_token (str):
-            ``next_page_token`` can be sent in a subsequent
-            [ListInstancePartitionOperations][google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations]
-            call to fetch more of the matching metadata.
-        unreachable_instance_partitions (MutableSequence[str]):
-            The list of unreachable instance partitions. It includes
-            the names of instance partitions whose operation metadata
-            could not be retrieved within
-            [instance_partition_deadline][google.spanner.admin.instance.v1.ListInstancePartitionOperationsRequest.instance_partition_deadline].
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    operations: MutableSequence[operations_pb2.Operation] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=operations_pb2.Operation,
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    unreachable_instance_partitions: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=3,
-    )
-
-
-class MoveInstanceRequest(proto.Message):
-    r"""The request for
-    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
-
-    Attributes:
-        name (str):
-            Required. The instance to move. Values are of the form
-            ``projects/<project>/instances/<instance>``.
-        target_config (str):
-            Required. The target instance configuration to which the
-            instance will be moved. Values are of the form
-            ``projects/<project>/instanceConfigs/<config>``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    target_config: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class MoveInstanceResponse(proto.Message):
-    r"""The response for
-    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
-
-    """
-
-
-class MoveInstanceMetadata(proto.Message):
-    r"""Metadata type for the operation returned by
-    [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance].
-
-    Attributes:
-        target_config (str):
-            The target instance configuration to which the instance is
-            being moved. Values are of the form
-            ``projects/<project>/instanceConfigs/<config>``.
-        progress (google.cloud.spanner_admin_instance_v1.types.OperationProgress):
-            The progress of the
-            [MoveInstance][google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance]
-            operation.
- [progress_percent][google.spanner.admin.instance.v1.OperationProgress.progress_percent] - is reset when cancellation is requested. - cancel_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which this operation was - cancelled. - """ - - target_config: str = proto.Field( - proto.STRING, - number=1, - ) - progress: common.OperationProgress = proto.Field( - proto.MESSAGE, - number=2, - message=common.OperationProgress, - ) - cancel_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini b/owl-bot-staging/spanner_admin_instance/v1/mypy.ini deleted file mode 100644 index 574c5aed39..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py b/owl-bot-staging/spanner_admin_instance/v1/noxfile.py deleted file mode 100644 index 5918b3e5f7..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/noxfile.py +++ /dev/null @@ -1,601 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil - -from typing import Dict, List -import warnings - -import nox - -BLACK_VERSION = "black[jupyter]==23.7.0" -ISORT_VERSION = "isort==5.11.0" - -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", - "3.14", -] - -DEFAULT_PYTHON_VERSION = "3.14" - -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): -# Switch this to Python 3.15 alpha1 -# https://peps.python.org/pep-0790/ -PREVIEW_PYTHON_VERSION = "3.14" - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = "google-cloud-spanner-admin-instance" - -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] -UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_DEPENDENCIES: List[str] = [] -SYSTEM_TEST_EXTRAS: List[str] = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} - -nox.options.sessions = [ - "unit", - "system", - "cover", - "lint", - "lint_setup_py", - "blacken", - "docs", -] - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy - "mypy<1.16.0", - "types-requests", - "types-protobuf", - ) - session.install(".") - session.run( - "mypy", - "-p", - "google", - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "update", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install("google-cloud-testutils") - session.install(".") - - session.run( - "lower-bound-checker", - "check", - "--package-name", - PACKAGE_NAME, - "--constraints-file", - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *LINT_PATHS, - ) - - session.run("flake8", "google", "tests") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def format(session): - """ - Run isort to sort imports. Then run black - to format code to uniform standard. 
- """ - session.install(BLACK_VERSION, ISORT_VERSION) - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run( - "isort", - "--fss", - *LINT_PATHS, - ) - session.run( - "black", - *LINT_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("setuptools", "docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - - -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def unit(session, protobuf_implementation): - # Install all test dependencies, then install this package in-place. - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - install_unittest_dependencies(session, "-c", constraints_path) - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - # Run py.test against the unit tests. 
- session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -def install_systemtest_dependencies(session, *constraints): - session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def system(session): - """Run the system test suite.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. - if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": - session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Install pyopenssl for mTLS testing. - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - - system_test_exists = os.path.exists(system_test_path) - system_test_folder_exists = os.path.exists(system_test_folder_path) - # Sanity check: only run tests if found. - if not system_test_exists and not system_test_folder_exists: - session.skip("System tests were not found") - - install_systemtest_dependencies(session, "-c", constraints_path) - - # Run py.test against the system tests. - if system_test_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if system_test_folder_exists: - session.run( - "py.test", - "--quiet", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python="3.10") -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
- "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "sphinx==4.5.0", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", "html", # builder - "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory - # paths to build: - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python="3.10") -def docfx(session): - """Build the docfx yaml files for this library.""" - - session.install("-e", ".") - session.install( - # We need to pin to specific versions of the `sphinxcontrib-*` packages - # which still support sphinx 4.x. - # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 - # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. - "sphinxcontrib-applehelp==1.0.4", - "sphinxcontrib-devhelp==1.0.2", - "sphinxcontrib-htmlhelp==2.0.1", - "sphinxcontrib-qthelp==1.0.3", - "sphinxcontrib-serializinghtml==1.1.5", - "gcp-sphinx-docfx-yaml", - "alabaster", - "recommonmark", - ) - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-T", # show full traceback on exception - "-N", # no colors - "-D", - ( - "extensions=sphinx.ext.autodoc," - "sphinx.ext.autosummary," - "docfx_yaml.extension," - "sphinx.ext.intersphinx," - "sphinx.ext.coverage," - "sphinx.ext.napoleon," - "sphinx.ext.todo," - "sphinx.ext.viewcode," - "recommonmark" - ), - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=PREVIEW_PYTHON_VERSION) -@nox.parametrize( - "protobuf_implementation", - ["python", "upb", "cpp"], -) -def prerelease_deps(session, protobuf_implementation): - """ - Run all tests with pre-release versions of dependencies installed - rather than the standard non pre-release versions. - Pre-release versions can be installed using - `pip install --pre `. - """ - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): - # Remove this check once support for Protobuf 3.x is dropped. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install all dependencies - session.install("-e", ".") - - # Install dependencies for the unit test environment - unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES - session.install(*unit_deps_all) - - # Install dependencies for the system test environment - system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS - ) - session.install(*system_deps_all) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. - with open( - CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    # Install dependencies specified in `testing/constraints-X.txt`.
-    session.install(*constraints_deps)
-
-    # Note: If a dependency is added to the `prerel_deps` list,
-    # the `core_dependencies_from_source` list in the `core_deps_from_source`
-    # nox session should also be updated.
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        "grpc-google-iam-v1",
-        "grpcio",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--ignore-installed", dep)
-        # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`
-        # to the dictionary below once this bug is fixed.
-        # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add
-        # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below
-        # once this bug is fixed.
-        package_namespaces = {
-            "google-api-core": "google.api_core",
-            "google-auth": "google.auth",
-            "grpcio": "grpc",
-            "protobuf": "google.protobuf",
-            "proto-plus": "proto",
-        }
-
-        version_namespace = package_namespaces.get(dep)
-
-        print(f"Installed {dep}")
-        if version_namespace:
-            session.run(
-                "python",
-                "-c",
-                f"import {version_namespace}; print({version_namespace}.__version__)",
-            )
-
-    session.run(
-        "py.test",
-        "tests/unit",
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-@nox.parametrize(
-    "protobuf_implementation",
-    ["python", "upb"],
-)
-def core_deps_from_source(session, protobuf_implementation):
-    """Run all tests with core dependencies installed from source
-    rather than pulling the dependencies from PyPI.
-    """
-
-    # Install all dependencies
-    session.install("-e", ".")
-
-    # Install dependencies for the unit test environment
-    unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
-    session.install(*unit_deps_all)
-
-    # Install dependencies for the system test environment
-    system_deps_all = (
-        SYSTEM_TEST_STANDARD_DEPENDENCIES
-        + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
-        + SYSTEM_TEST_EXTRAS
-    )
-    session.install(*system_deps_all)
-
-    # Because we test minimum dependency versions on the minimum Python
-    # version, the first version we test with in the unit tests sessions has a
-    # constraints file containing all dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    # Install dependencies specified in `testing/constraints-X.txt`.
-    session.install(*constraints_deps)
-
-    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and
-    # `grpcio-status` should be added to the list below so that they are installed from source,
-    # rather than PyPI.
-    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be
-    # added to the list below so that it is installed from source, rather than PyPI.
-    # Note: If a dependency is added to the `core_dependencies_from_source` list,
-    # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated.
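-    # Each entry below is a PEP 508 direct reference ("name @ git+<url>"):
-    # pip clones the Git repository and installs the package from source;
-    # the "subdirectory" fragment selects a package within a monorepo.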
- core_dependencies_from_source = [ - "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", - "google-api-core @ git+https://github.com/googleapis/python-api-core.git", - "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", - "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", - "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", - ] - - for dep in core_dependencies_from_source: - session.install(dep, "--no-deps", "--ignore-installed") - print(f"Installed {dep}") - - session.run( - "py.test", - "tests/unit", - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json deleted file mode 100644 index f58e9794e2..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json +++ /dev/null @@ -1,3450 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.spanner.admin.instance.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-spanner-admin-instance", - "version": "0.0.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_config", - "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "instance_config_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance_config" - }, - "description": "Sample for CreateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_create_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_config_async.py" - }, - { - "canonical": 
true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_config", - "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "instance_config_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance_config" - }, - "description": "Sample for CreateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_create_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "instance_partition_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance_partition" - }, - "description": "Sample for CreateInstancePartition", - "file": "spanner_v1_generated_instance_admin_create_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstancePartitionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "instance_partition_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance_partition" - }, - "description": "Sample for CreateInstancePartition", - "file": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.create_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_instance" - }, - "description": "Sample 
for CreateInstance", - "file": "spanner_v1_generated_instance_admin_create_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.create_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "CreateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.CreateInstanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "instance_id", - "type": "str" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_instance" - }, - "description": "Sample for CreateInstance", - "file": "spanner_v1_generated_instance_admin_create_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_CreateInstance_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_create_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_config" - 
}, - "description": "Sample for DeleteInstanceConfig", - "file": "spanner_v1_generated_instance_admin_delete_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_config" - }, - "description": "Sample for DeleteInstanceConfig", - "file": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_partition" - }, - "description": "Sample for DeleteInstancePartition", - "file": 
"spanner_v1_generated_instance_admin_delete_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance_partition" - }, - "description": "Sample for DeleteInstancePartition", - "file": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.delete_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "spanner_v1_generated_instance_admin_delete_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"spanner_v1_generated_InstanceAdmin_DeleteInstance_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.delete_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "DeleteInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.DeleteInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_instance" - }, - "description": "Sample for DeleteInstance", - "file": "spanner_v1_generated_instance_admin_delete_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_DeleteInstance_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_delete_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_instance_admin_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", - "shortName": "get_instance_config" - }, - "description": "Sample for GetInstanceConfig", - "file": "spanner_v1_generated_instance_admin_get_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig", - "shortName": "get_instance_config" - }, - "description": "Sample for GetInstanceConfig", - "file": "spanner_v1_generated_instance_admin_get_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", - "shortName": "get_instance_partition" - }, - "description": "Sample for GetInstancePartition", - "file": "spanner_v1_generated_instance_admin_get_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { 
- "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstancePartitionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.InstancePartition", - "shortName": "get_instance_partition" - }, - "description": "Sample for GetInstancePartition", - "file": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.get_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "spanner_v1_generated_instance_admin_get_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, 
- "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.get_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.GetInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "spanner_v1_generated_instance_admin_get_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_GetInstance_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_get_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_config_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsAsyncPager", - "shortName": "list_instance_config_operations" - }, - "description": "Sample for ListInstanceConfigOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - 
"end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_config_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigOperationsPager", - "shortName": "list_instance_config_operations" - }, - "description": "Sample for ListInstanceConfigOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_configs", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsAsyncPager", - "shortName": "list_instance_configs" - }, - "description": "Sample for ListInstanceConfigs", - "file": "spanner_v1_generated_instance_admin_list_instance_configs_async.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_configs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_configs", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstanceConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstanceConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstanceConfigsPager", - "shortName": "list_instance_configs" - }, - "description": "Sample for ListInstanceConfigs", - "file": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_configs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partition_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitionOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsAsyncPager", - "shortName": 
"list_instance_partition_operations" - }, - "description": "Sample for ListInstancePartitionOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partition_operations", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitionOperations", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitionOperations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionOperationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionOperationsPager", - "shortName": "list_instance_partition_operations" - }, - "description": "Sample for ListInstancePartitionOperations", - "file": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instance_partitions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsAsyncPager", - "shortName": "list_instance_partitions" - }, - "description": "Sample for ListInstancePartitions", - "file": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partitions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instance_partitions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstancePartitions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstancePartitions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancePartitionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancePartitionsPager", - "shortName": "list_instance_partitions" - }, - "description": "Sample for ListInstancePartitions", - "file": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instance_partitions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.list_instances", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesAsyncPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "spanner_v1_generated_instance_admin_list_instances_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instances_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.list_instances", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.ListInstances", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.spanner_admin_instance_v1.services.instance_admin.pagers.ListInstancesPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "spanner_v1_generated_instance_admin_list_instances_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_ListInstances_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_list_instances_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.move_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "MoveInstance" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "move_instance" - }, - "description": "Sample for MoveInstance", - "file": "spanner_v1_generated_instance_admin_move_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_move_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.move_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.MoveInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "MoveInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.MoveInstanceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "move_instance" - }, - "description": "Sample for MoveInstance", - "file": "spanner_v1_generated_instance_admin_move_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_MoveInstance_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_move_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_instance_admin_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.set_iam_policy", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.SetIamPolicy", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", 
- "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.test_iam_permissions", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.TestIamPermissions", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" - }, - { - "name": "instance_config", - "type": 
"google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance_config" - }, - "description": "Sample for UpdateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_update_instance_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_config", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstanceConfig", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstanceConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceConfigRequest" - }, - { - "name": "instance_config", - "type": "google.cloud.spanner_admin_instance_v1.types.InstanceConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance_config" - }, - "description": "Sample for UpdateInstanceConfig", - "file": "spanner_v1_generated_instance_admin_update_instance_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance_partition", - "method": { - "fullName": 
"google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance_partition" - }, - "description": "Sample for UpdateInstancePartition", - "file": "spanner_v1_generated_instance_admin_update_instance_partition_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_partition_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance_partition", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstancePartition", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstancePartition" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstancePartitionRequest" - }, - { - "name": "instance_partition", - "type": "google.cloud.spanner_admin_instance_v1.types.InstancePartition" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance_partition" - }, - "description": "Sample for UpdateInstancePartition", - "file": "spanner_v1_generated_instance_admin_update_instance_partition_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"spanner_v1_generated_instance_admin_update_instance_partition_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient", - "shortName": "InstanceAdminAsyncClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminAsyncClient.update_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "spanner_v1_generated_instance_admin_update_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient", - "shortName": "InstanceAdminClient" - }, - "fullName": "google.cloud.spanner_admin_instance_v1.InstanceAdminClient.update_instance", - "method": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance", - "service": { - "fullName": "google.spanner.admin.instance.v1.InstanceAdmin", - "shortName": "InstanceAdmin" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.spanner_admin_instance_v1.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.spanner_admin_instance_v1.types.Instance" - }, - { - "name": "field_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "spanner_v1_generated_instance_admin_update_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "spanner_v1_generated_InstanceAdmin_UpdateInstance_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - 
"start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "spanner_v1_generated_instance_admin_update_instance_sync.py" - } - ] -} diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py deleted file mode 100644 index 74bd640044..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_create_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py deleted file mode 100644 index c3f266e4c4..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_create_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.CreateInstanceConfigRequest( - parent="parent_value", - instance_config_id="instance_config_id_value", - ) - - # Make the request - operation = client.create_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py deleted file mode 100644 index c5b7616534..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_config_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_create_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.CreateInstanceConfigRequest( - parent="parent_value", - instance_config_id="instance_config_id_value", - ) - - # Make the request - operation = client.create_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py deleted file mode 100644 index a22765f53f..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
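In the synchronous samples, `operation.result()` blocks until the long-running operation completes. A sketch that bounds the wait and surfaces failures, continuing from the sync CreateInstanceConfig sample's `client` and `request` (the 300-second timeout is illustrative):

    import concurrent.futures

    from google.api_core.exceptions import GoogleAPICallError

    operation = client.create_instance_config(request=request)
    try:
        # Polls for at most 300 seconds; a failed LRO raises the mapped API error.
        response = operation.result(timeout=300)
        print(response)
    except (GoogleAPICallError, concurrent.futures.TimeoutError) as exc:
        print(f"CreateInstanceConfig did not complete: {exc}")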
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_create_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstancePartitionRequest( - parent="parent_value", - instance_partition_id="instance_partition_id_value", - instance_partition=instance_partition, - ) - - # Make the request - operation = client.create_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py deleted file mode 100644 index 5b5f2e0e26..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_partition_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_create_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstancePartitionRequest( - parent="parent_value", - instance_partition_id="instance_partition_id_value", - instance_partition=instance_partition, - ) - - # Make the request - operation = client.create_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py deleted file mode 100644 index f43c5016b5..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_create_instance_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_CreateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_create_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.CreateInstanceRequest( - parent="parent_value", - instance_id="instance_id_value", - instance=instance, - ) - - # Make the request - operation = client.create_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_CreateInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py deleted file mode 100644 index 262da709aa..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
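The `"name_value"`/`"config_value"` placeholders in the CreateInstance samples stand in for full resource names. One way to build them is with the generated path helpers; the project, config, and instance IDs below are hypothetical:

    project_id = "my-project"  # hypothetical project
    instance = spanner_admin_instance_v1.Instance(
        name=f"projects/{project_id}/instances/my-instance",
        # instance_config_path() renders "projects/{project}/instanceConfigs/{instance_config}".
        config=spanner_admin_instance_v1.InstanceAdminClient.instance_config_path(
            project_id, "regional-us-central1"
        ),
        display_name="My Instance",
        node_count=1,
    )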
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_delete_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py deleted file mode 100644 index df83d9e424..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_delete_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_config(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py deleted file mode 100644 index 9a9c4d7ca1..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_config_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_delete_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceConfigRequest( - name="name_value", - ) - - # Make the request - client.delete_instance_config(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py deleted file mode 100644 index 78ca44d6c2..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
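The delete samples print nothing because these RPCs return `google.protobuf.Empty`. For idempotent cleanup, one option is to tolerate a missing resource, reusing the sample's `client` and `request`:

    from google.api_core.exceptions import NotFound

    try:
        client.delete_instance_config(request=request)
    except NotFound:
        # The config was already deleted; treat this as a no-op.
        pass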
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - await client.delete_instance_partition(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py deleted file mode 100644 index 72249ef6c7..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_partition_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_delete_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstancePartitionRequest( - name="name_value", - ) - - # Make the request - client.delete_instance_partition(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py deleted file mode 100644 index 613ac6c070..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_delete_instance_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_delete_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.DeleteInstanceRequest( - name="name_value", - ) - - # Make the request - client.delete_instance(request=request) - - -# [END spanner_v1_generated_InstanceAdmin_DeleteInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py deleted file mode 100644 index a0b620ae4f..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py deleted file mode 100644 index cc0d725a03..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py deleted file mode 100644 index 059eb2a078..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
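As the GetIamPolicy samples show, the IAM RPCs take request messages from `google.iam.v1`, not from `spanner_admin_instance_v1`. A companion sketch for `set_iam_policy`; the role, member, and resource name are placeholders:

    from google.iam.v1 import iam_policy_pb2, policy_pb2  # type: ignore

    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/spanner.admin",          # placeholder role
                members=["user:admin@example.com"],  # placeholder member
            )
        ]
    )
    request = iam_policy_pb2.SetIamPolicyRequest(
        resource="projects/my-project/instances/my-instance",  # placeholder
        policy=policy,
    )
    response = client.set_iam_policy(request=request)
    print(response)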
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_get_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py deleted file mode 100644 index 9adfb51c2e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_get_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_config(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py deleted file mode 100644 index 16e9d3c3c8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_config_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_get_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance_config(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py deleted file mode 100644 index 8e84abcf6e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance_partition(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py deleted file mode 100644 index d617cbb382..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_partition_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_get_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstancePartitionRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance_partition(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py deleted file mode 100644 index 4a246a5bf3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_get_instance_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_GetInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_get_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_GetInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py deleted file mode 100644 index a0580fef7c..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstanceConfigOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
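The unary Get* methods above also accept per-call `retry`, `timeout`, and `metadata` keyword arguments from `google.api_core`. A sketch with illustrative values, continuing from the sync GetInstance sample's `client` and `request`:

    from google.api_core import retry as retries

    response = client.get_instance(
        request=request,
        timeout=60.0,  # overall deadline for the call, in seconds
        retry=retries.Retry(initial=0.25, maximum=8.0, timeout=60.0),
    )
    print(response)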
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_list_instance_config_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_config_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py deleted file mode 100644 index 89213b3a2e..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_config_operations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstanceConfigOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_list_instance_config_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_config_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigOperations_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py deleted file mode 100644 index 651b2f88ae..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstanceConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
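The list samples iterate results item by item; the returned pager can also be consumed page by page. A sketch with an explicit page size (the parent is a placeholder):

    request = spanner_admin_instance_v1.ListInstanceConfigsRequest(
        parent="projects/my-project",  # placeholder parent
        page_size=50,
    )
    pager = client.list_instance_configs(request=request)
    for page in pager.pages:  # each page is one ListInstanceConfigsResponse
        for config in page.instance_configs:
            print(config.name)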
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_list_instance_configs(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py deleted file mode 100644 index a0f120277a..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_configs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstanceConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_list_instance_configs(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstanceConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstanceConfigs_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py deleted file mode 100644 index 9dedb973f1..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstancePartitionOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_list_instance_partition_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partition_operations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py deleted file mode 100644 index b2a7549b29..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partition_operations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstancePartitionOperations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_list_instance_partition_operations(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionOperationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partition_operations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitionOperations_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py deleted file mode 100644 index 56adc152fe..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstancePartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_list_instance_partitions(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partitions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py deleted file mode 100644 index 1e65552fc1..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instance_partitions_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstancePartitions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_list_instance_partitions(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancePartitionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instance_partitions(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstancePartitions_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py deleted file mode 100644 index abe1a1affa..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstances_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_list_instances(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstances_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py deleted file mode 100644 index f344baff11..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_list_instances_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_ListInstances_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_list_instances(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END spanner_v1_generated_InstanceAdmin_ListInstances_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py deleted file mode 100644 index ce62120492..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for MoveInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_MoveInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
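The samples above always build an explicit request message. The generated client also flattens the request's signature fields into keyword arguments, so the same listing can be written more compactly; the fixup script later in this patch rewrites calls in the opposite direction. A sketch under that assumption:

    from google.cloud import spanner_admin_instance_v1

    def list_all_instances(parent: str):
        client = spanner_admin_instance_v1.InstanceAdminClient()
        # Flattened form; equivalent to passing ListInstancesRequest(parent=parent).
        for instance in client.list_instances(parent=parent):
            print(instance.name)
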
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_move_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.MoveInstanceRequest( - name="name_value", - target_config="target_config_value", - ) - - # Make the request - operation = client.move_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_MoveInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py deleted file mode 100644 index 4621200e0c..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_move_instance_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for MoveInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_MoveInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_move_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.MoveInstanceRequest( - name="name_value", - target_config="target_config_value", - ) - - # Make the request - operation = client.move_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_MoveInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py deleted file mode 100644 index 2443f2127d..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
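Both MoveInstance samples block on `operation.result()` with no deadline. `google.api_core.operation.Operation` accepts a polling timeout and exposes the operation's metadata, which is useful when a move is slow or fails. A sketch, assuming an `operation` obtained exactly as in the sync sample above:

    import concurrent.futures

    from google.api_core import exceptions as core_exceptions

    def wait_for_move(operation, timeout_s: float = 300.0):
        try:
            # Poll until done, but give up after `timeout_s` seconds.
            return operation.result(timeout=timeout_s)
        except concurrent.futures.TimeoutError:
            print("still running; metadata so far:", operation.metadata)
            raise
        except core_exceptions.GoogleAPICallError as exc:
            # The operation itself failed on the server side.
            print("move failed:", exc)
            raise
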
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py deleted file mode 100644 index ba6401602f..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_SetIamPolicy_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py deleted file mode 100644 index aa0e05dde3..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
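The SetIamPolicy samples above populate only `resource`, which demonstrates the call shape but not a meaningful request: a real call also carries a `policy`, and is usually the write half of a read-modify-write so the policy's embedded etag guards against concurrent edits. A sketch with a hypothetical member and an assumed role name:

    from google.cloud import spanner_admin_instance_v1
    from google.iam.v1 import iam_policy_pb2

    def grant_reader(resource: str, member: str):
        client = spanner_admin_instance_v1.InstanceAdminClient()
        policy = client.get_iam_policy(
            request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
        )
        # Add a binding; the fetched policy keeps its etag, so a concurrent
        # writer causes this set_iam_policy call to fail rather than clobber.
        policy.bindings.add(role="roles/spanner.databaseReader", members=[member])
        return client.set_iam_policy(
            request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
        )
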
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py deleted file mode 100644 index 80b2a4dd21..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_TestIamPermissions_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py deleted file mode 100644 index ecabbf5191..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
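TestIamPermissions returns the subset of the requested permissions that the caller actually holds, so the natural post-processing step is a set difference against what was asked for. A small sketch along those lines, with the permission list supplied by the caller:

    from google.cloud import spanner_admin_instance_v1
    from google.iam.v1 import iam_policy_pb2

    def missing_permissions(resource: str, wanted: list) -> list:
        client = spanner_admin_instance_v1.InstanceAdminClient()
        response = client.test_iam_permissions(
            request=iam_policy_pb2.TestIamPermissionsRequest(
                resource=resource,
                permissions=wanted,
            )
        )
        # Anything requested but absent from the response was denied.
        return sorted(set(wanted) - set(response.permissions))
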
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_update_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py deleted file mode 100644 index f7ea78401c..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
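The UpdateInstance samples populate the instance's required fields but leave `field_mask` unset, even though the keyword table in the fixup script below lists it ('update_instance': ('instance', 'field_mask')) and the service uses it to decide which fields to write. A sketch of a targeted update, with placeholder values:

    from google.cloud import spanner_admin_instance_v1
    from google.protobuf import field_mask_pb2

    def rename_instance(name: str, new_display_name: str):
        client = spanner_admin_instance_v1.InstanceAdminClient()
        request = spanner_admin_instance_v1.UpdateInstanceRequest(
            instance=spanner_admin_instance_v1.Instance(
                name=name,
                display_name=new_display_name,
            ),
            # Only the masked fields are written; everything else is left alone.
            field_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
        )
        operation = client.update_instance(request=request)
        return operation.result()
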
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_update_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.UpdateInstanceConfigRequest( - ) - - # Make the request - operation = client.update_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py deleted file mode 100644 index 1d184f6c58..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_config_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstanceConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_update_instance_config(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - request = spanner_admin_instance_v1.UpdateInstanceConfigRequest( - ) - - # Make the request - operation = client.update_instance_config(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_UpdateInstanceConfig_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py deleted file mode 100644 index 42d3c484f8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
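Both UpdateInstanceConfig samples construct a completely empty request, which again only demonstrates the call shape. Per the keyword table in the fixup script below, the request carries `instance_config`, `update_mask`, and `validate_only`; the last of these can be used to check a change to a custom configuration without applying it. A hedged sketch with a hypothetical label change:

    from google.cloud import spanner_admin_instance_v1
    from google.protobuf import field_mask_pb2

    def validate_config_label_change(config_name: str):
        client = spanner_admin_instance_v1.InstanceAdminClient()
        instance_config = spanner_admin_instance_v1.InstanceConfig(name=config_name)
        instance_config.labels["env"] = "test"  # hypothetical label
        request = spanner_admin_instance_v1.UpdateInstanceConfigRequest(
            instance_config=instance_config,
            update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
            validate_only=True,  # check the change without applying it
        )
        operation = client.update_instance_config(request=request)
        return operation.result()
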
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -async def sample_update_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminAsyncClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( - instance_partition=instance_partition, - ) - - # Make the request - operation = client.update_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_async] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py deleted file mode 100644 index 56cd2760a1..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_partition_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstancePartition -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_update_instance_partition(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance_partition = spanner_admin_instance_v1.InstancePartition() - instance_partition.node_count = 1070 - instance_partition.name = "name_value" - instance_partition.config = "config_value" - instance_partition.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstancePartitionRequest( - instance_partition=instance_partition, - ) - - # Make the request - operation = client.update_instance_partition(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_UpdateInstancePartition_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py b/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py deleted file mode 100644 index 2340e701e1..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/samples/generated_samples/spanner_v1_generated_instance_admin_update_instance_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-spanner-admin-instance - - -# [START spanner_v1_generated_InstanceAdmin_UpdateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import spanner_admin_instance_v1 - - -def sample_update_instance(): - # Create a client - client = spanner_admin_instance_v1.InstanceAdminClient() - - # Initialize request argument(s) - instance = spanner_admin_instance_v1.Instance() - instance.name = "name_value" - instance.config = "config_value" - instance.display_name = "display_name_value" - - request = spanner_admin_instance_v1.UpdateInstanceRequest( - instance=instance, - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END spanner_v1_generated_InstanceAdmin_UpdateInstance_sync] diff --git a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py b/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py deleted file mode 100644 index 8200af5099..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/scripts/fixup_spanner_admin_instance_v1_keywords.py +++ /dev/null @@ -1,196 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class spanner_admin_instanceCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'create_instance_config': ('parent', 'instance_config_id', 'instance_config', 'validate_only', ), - 'create_instance_partition': ('parent', 'instance_partition_id', 'instance_partition', ), - 'delete_instance': ('name', ), - 'delete_instance_config': ('name', 'etag', 'validate_only', ), - 'delete_instance_partition': ('name', 'etag', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_instance': ('name', 'field_mask', ), - 'get_instance_config': ('name', ), - 'get_instance_partition': ('name', ), - 'list_instance_config_operations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_instance_configs': ('parent', 'page_size', 'page_token', ), - 'list_instance_partition_operations': ('parent', 'filter', 'page_size', 'page_token', 'instance_partition_deadline', ), - 'list_instance_partitions': ('parent', 'page_size', 'page_token', 'instance_partition_deadline', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'instance_deadline', ), - 'move_instance': ('name', 'target_config', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_instance': ('instance', 'field_mask', ), - 'update_instance_config': ('instance_config', 'update_mask', 'validate_only', ), - 'update_instance_partition': ('instance_partition', 'field_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=spanner_admin_instanceCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the spanner_admin_instance client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/spanner_admin_instance/v1/setup.py b/owl-bot-staging/spanner_admin_instance/v1/setup.py deleted file mode 100644 index 8d7bb912c2..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/setup.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
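To make the fixup script's effect concrete: it walks every .py file under the input directory, and for each call whose method name appears in METHOD_TO_PARAMS it folds positional and non-control keyword arguments into a single `request` dict, keeping `retry`, `timeout`, and `metadata` as keyword arguments. Roughly, with hypothetical argument values:

    # Before fixup (positional arguments in METHOD_TO_PARAMS order):
    client.create_instance("projects/my-project", "my-instance", instance, timeout=30)

    # After fixup (the rewrite the transformer above produces):
    client.create_instance(request={
        'parent': "projects/my-project",
        'instance_id': "my-instance",
        'instance': instance,
    }, timeout=30)

    # Invocation, per the argparse definition above:
    #   python3 fixup_spanner_admin_instance_v1_keywords.py \
    #       --input-directory ./src --output-directory ./fixed
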
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-spanner-admin-instance' - - -description = "Google Cloud Spanner Admin Instance API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/spanner_admin_instance/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", - "grpcio >= 1.33.2, < 2.0.0", - "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "proto-plus >= 1.22.3, <2.0.0", - "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-spanner-admin-instance" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
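Two small notes on the setup.py above. First, pip evaluates every requirement whose environment marker matches, so on Python 3.14 both grpcio lines apply and the effective floor is 1.75.1 (likewise proto-plus 1.25.0 on 3.13+). Second, the dots in the version-extraction pattern are unescaped, so `.` matches any character; this is harmless against a well-formed gapic_version.py, but the pattern is easy to tighten:

    import re

    # Escaped dots; otherwise equivalent to the lookaround pattern in setup.py.
    VERSION_RE = re.compile(r'(?<=")\d+\.\d+\.\d+(?=")')

    assert VERSION_RE.findall('__version__ = "0.1.0"') == ["0.1.0"]
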
-google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt deleted file mode 100644 index 2ae5a677e8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,13 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt deleted file mode 100644 index 2ae5a677e8..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.14.txt +++ /dev/null @@ -1,13 +0,0 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. -# List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt deleted file mode 100644 index 5b1ee6c35a..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,13 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) -# Add the minimum supported version of grpcio to constraints files -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt b/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt deleted file mode 100644 index ef1c92ffff..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py deleted file mode 100644 index 191773d557..0000000000 --- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py
deleted file mode 100644
index 191773d557..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py
deleted file mode 100644
index 191773d557..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
deleted file mode 100644
index 82c9940cf7..0000000000
--- a/owl-bot-staging/spanner_admin_instance/v1/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
+++ /dev/null
@@ -1,17636 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import re
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable, AsyncIterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-from google.protobuf import json_format
-
-try:
-    from google.auth.aio import credentials as ga_credentials_async
-    HAS_GOOGLE_AUTH_AIO = True
-except ImportError:  # pragma: NO COVER
-    HAS_GOOGLE_AUTH_AIO = False
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async  # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminAsyncClient
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import InstanceAdminClient
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers
-from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports
-from google.cloud.spanner_admin_instance_v1.types import common
-from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import options_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.oauth2 import service_account
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from google.type import expr_pb2  # type: ignore
-import google.auth
-
-
-CRED_INFO_JSON = {
-    "credential_source": "/path/to/file",
-    "credential_type": "service account credentials",
-    "principal": "service-account@example.com",
-}
-CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
-
-
-async def mock_async_gen(data, chunk_size=1):
-    for i in range(0, len(data)):  # pragma: NO COVER
-        chunk = data[i : i + chunk_size]
-        yield chunk.encode("utf-8")
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
-# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
-def async_anonymous_credentials():
-    if HAS_GOOGLE_AUTH_AIO:
-        return ga_credentials_async.AnonymousCredentials()
-    return ga_credentials.AnonymousCredentials()
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
-def modify_default_endpoint(client):
-    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
-
-# If default endpoint template is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint template so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
-def modify_default_endpoint_template(client):
-    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE
-
-
-def test__get_default_mtls_endpoint():
-    api_endpoint = "example.googleapis.com"
-    api_mtls_endpoint = "example.mtls.googleapis.com"
-    sandbox_endpoint = "example.sandbox.googleapis.com"
-    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
-    non_googleapi = "api.example.com"
-
-    assert InstanceAdminClient._get_default_mtls_endpoint(None) is None
-    assert InstanceAdminClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
-    assert InstanceAdminClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
-    assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
-    assert InstanceAdminClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
-    assert InstanceAdminClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-def test__read_environment_variables():
-    assert InstanceAdminClient._read_environment_variables() == (False, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        assert InstanceAdminClient._read_environment_variables() == (True, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
-        assert InstanceAdminClient._read_environment_variables() == (False, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            InstanceAdminClient._read_environment_variables()
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        assert InstanceAdminClient._read_environment_variables() == (False, "never", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        assert InstanceAdminClient._read_environment_variables() == (False, "always", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
-        assert InstanceAdminClient._read_environment_variables() == (False, "auto", None)
-
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            InstanceAdminClient._read_environment_variables()
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
-        assert InstanceAdminClient._read_environment_variables() == (False, "auto", "foo.com")
-
-def test__get_client_cert_source():
-    mock_provided_cert_source = mock.Mock()
-    mock_default_cert_source = mock.Mock()
-
-    assert InstanceAdminClient._get_client_cert_source(None, False) is None
-    assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, False) is None
-    assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source
-
-    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
-            assert InstanceAdminClient._get_client_cert_source(None, True) is mock_default_cert_source
-            assert InstanceAdminClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
-
-@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient))
-@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient))
-def test__get_api_endpoint():
-    api_override = "foo.com"
-    mock_client_cert_source = mock.Mock()
-    default_universe = InstanceAdminClient._DEFAULT_UNIVERSE
-    default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
-    mock_universe = "bar.com"
-    mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
-    assert InstanceAdminClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
-    assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT
-    assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
-    assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT
-    assert InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == InstanceAdminClient.DEFAULT_MTLS_ENDPOINT
-    assert InstanceAdminClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
-    assert InstanceAdminClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
-
-    with pytest.raises(MutualTLSChannelError) as excinfo:
-        InstanceAdminClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
-    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
-
-
-def test__get_universe_domain():
-    client_universe_domain = "foo.com"
-    universe_domain_env = "bar.com"
-
-    assert InstanceAdminClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
-    assert InstanceAdminClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
-    assert InstanceAdminClient._get_universe_domain(None, None) == InstanceAdminClient._DEFAULT_UNIVERSE
-
-    with pytest.raises(ValueError) as excinfo:
-        InstanceAdminClient._get_universe_domain("", None)
-    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
-
-@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [
-    (401, CRED_INFO_JSON, True),
-    (403, CRED_INFO_JSON, True),
-    (404, CRED_INFO_JSON, True),
-    (500, CRED_INFO_JSON, False),
-    (401, None, False),
-    (403, None, False),
-    (404, None, False),
-    (500, None, False)
-])
-def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
-    cred = mock.Mock(["get_cred_info"])
-    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
-    client = InstanceAdminClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    if show_cred_info:
-        assert error.details == ["foo", CRED_INFO_STRING]
-    else:
-        assert error.details == ["foo"]
-
-@pytest.mark.parametrize("error_code", [401,403,404,500])
-def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
-    cred = mock.Mock([])
-    assert not hasattr(cred, "get_cred_info")
-    client = InstanceAdminClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=[])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    assert error.details == []
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (InstanceAdminClient, "grpc"),
-    (InstanceAdminAsyncClient, "grpc_asyncio"),
-    (InstanceAdminClient, "rest"),
-])
-def test_instance_admin_client_from_service_account_info(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
-        factory.return_value = creds
-        info = {"valid": True}
-        client = client_class.from_service_account_info(info, transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'spanner.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://spanner.googleapis.com'
-        )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
-    (transports.InstanceAdminGrpcTransport, "grpc"),
-    (transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
-    (transports.InstanceAdminRestTransport, "rest"),
-])
-def test_instance_admin_client_service_account_always_use_jwt(transport_class, transport_name):
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=True)
-        use_jwt.assert_called_once_with(True)
-
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=False)
-        use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (InstanceAdminClient, "grpc"),
-    (InstanceAdminAsyncClient, "grpc_asyncio"),
-    (InstanceAdminClient, "rest"),
-])
-def test_instance_admin_client_from_service_account_file(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
-        factory.return_value = creds
-        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'spanner.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://spanner.googleapis.com'
-        )
-
-
-def test_instance_admin_client_get_transport_class():
-    transport = InstanceAdminClient.get_transport_class()
-    available_transports = [
-        transports.InstanceAdminGrpcTransport,
-        transports.InstanceAdminRestTransport,
-    ]
-    assert transport in available_transports
-
-    transport = InstanceAdminClient.get_transport_class("grpc")
-    assert transport == transports.InstanceAdminGrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"),
-    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
-    (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"),
-])
-@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient))
-@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient))
-def test_instance_admin_client_client_options(client_class, transport_class, transport_name):
-    # Check that if channel is provided we won't create a new one.
-    with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc:
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-        client = client_class(transport=transport)
-        gtc.assert_not_called()
-
-    # Check that if channel is provided via str we will create a new one.
-    with mock.patch.object(InstanceAdminClient, 'get_transport_class') as gtc:
-        client = client_class(transport=transport_name)
-        gtc.assert_called()
-
-    # Check the case api_endpoint is provided.
-    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_endpoint is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "true"),
-    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", "false"),
-    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "true"),
-    (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", "false"),
-])
-@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient))
-@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_instance_admin_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(client_options=options, transport=transport_name)
-
-            if use_client_cert_env == "false":
-                expected_client_cert_source = None
-                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-            else:
-                expected_client_cert_source = client_cert_source_callback
-                expected_host = client.DEFAULT_MTLS_ENDPOINT
-
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=expected_host,
-                scopes=None,
-                client_cert_source_for_mtls=expected_client_cert_source,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case ADC client cert is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
-                    if use_client_cert_env == "false":
-                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-                        expected_client_cert_source = None
-                    else:
-                        expected_host = client.DEFAULT_MTLS_ENDPOINT
-                        expected_client_cert_source = client_cert_source_callback
-
-                    patched.return_value = None
-                    client = client_class(transport=transport_name)
-                    patched.assert_called_once_with(
-                        credentials=None,
-                        credentials_file=None,
-                        host=expected_host,
-                        scopes=None,
-                        client_cert_source_for_mtls=expected_client_cert_source,
-                        quota_project_id=None,
-                        client_info=transports.base.DEFAULT_CLIENT_INFO,
-                        always_use_jwt_access=True,
-                        api_audience=None,
-                    )
-
-    # Check the case client_cert_source and ADC client cert are not provided.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
-                patched.return_value = None
-                client = client_class(transport=transport_name)
-                patched.assert_called_once_with(
-                    credentials=None,
-                    credentials_file=None,
-                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                    scopes=None,
-                    client_cert_source_for_mtls=None,
-                    quota_project_id=None,
-                    client_info=transports.base.DEFAULT_CLIENT_INFO,
-                    always_use_jwt_access=True,
-                    api_audience=None,
-                )
-
-
-@pytest.mark.parametrize("client_class", [
-    InstanceAdminClient, InstanceAdminAsyncClient
-])
-@mock.patch.object(InstanceAdminClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminClient))
-@mock.patch.object(InstanceAdminAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(InstanceAdminAsyncClient))
-def test_instance_admin_client_get_mtls_endpoint_and_cert_source(client_class):
-    mock_client_cert_source = mock.Mock()
-
-    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        mock_api_endpoint = "foo"
-        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
-        assert api_endpoint == mock_api_endpoint
-        assert cert_source == mock_client_cert_source
-
-    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
-        mock_client_cert_source = mock.Mock()
-        mock_api_endpoint = "foo"
-        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
-        assert api_endpoint == mock_api_endpoint
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-        assert api_endpoint == client_class.DEFAULT_ENDPOINT
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
-            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-            assert api_endpoint == client_class.DEFAULT_ENDPOINT
-            assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
-                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-                assert cert_source == mock_client_cert_source
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client_class.get_mtls_endpoint_and_cert_source()
-
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client_class.get_mtls_endpoint_and_cert_source()
-
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-@pytest.mark.parametrize("client_class", [
-    InstanceAdminClient, InstanceAdminAsyncClient
-])
-@mock.patch.object(InstanceAdminClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminClient))
-@mock.patch.object(InstanceAdminAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(InstanceAdminAsyncClient))
-def test_instance_admin_client_client_api_endpoint(client_class):
-    mock_client_cert_source = client_cert_source_callback
-    api_override = "foo.com"
-    default_universe = InstanceAdminClient._DEFAULT_UNIVERSE
-    default_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
-    mock_universe = "bar.com"
-    mock_endpoint = InstanceAdminClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
-    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
-    # use ClientOptions.api_endpoint as the api endpoint regardless.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
-            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
-            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-            assert client.api_endpoint == api_override
-
-    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        client = client_class(credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == default_endpoint
-
-    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
-    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        client = client_class(credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-
-    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
-    # and ClientOptions.universe_domain="bar.com",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
-    options = client_options.ClientOptions()
-    universe_exists = hasattr(options, "universe_domain")
-    if universe_exists:
-        options = client_options.ClientOptions(universe_domain=mock_universe)
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-    else:
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
-    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
-
-    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
-    options = client_options.ClientOptions()
-    if hasattr(options, "universe_domain"):
-        delattr(options, "universe_domain")
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == default_endpoint
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc"),
-    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
-    (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest"),
-])
-def test_instance_admin_client_client_options_scopes(client_class, transport_class, transport_name):
-    # Check the case scopes are provided.
-    options = client_options.ClientOptions(
-        scopes=["1", "2"],
-    )
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=["1", "2"],
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
-    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers),
-    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
-    (InstanceAdminClient, transports.InstanceAdminRestTransport, "rest", None),
-])
-def test_instance_admin_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
-    # Check the case credentials file is provided.
-    options = client_options.ClientOptions(
-        credentials_file="credentials.json"
-    )
-
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file="credentials.json",
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-def test_instance_admin_client_client_options_from_dict():
-    with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminGrpcTransport.__init__') as grpc_transport:
-        grpc_transport.return_value = None
-        client = InstanceAdminClient(
-            client_options={'api_endpoint': 'squid.clam.whelk'}
-        )
-        grpc_transport.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
-    (InstanceAdminClient, transports.InstanceAdminGrpcTransport, "grpc", grpc_helpers),
-    (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
-])
-def test_instance_admin_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
-    # Check the case credentials file is provided.
-    options = client_options.ClientOptions(
-        credentials_file="credentials.json"
-    )
-
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file="credentials.json",
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # test that the credentials from file are saved and used as the credentials.
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "spanner.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-                'https://www.googleapis.com/auth/spanner.admin',
-),
-            scopes=None,
-            default_host="spanner.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.ListInstanceConfigsRequest,
-    dict,
-])
-def test_list_instance_configs(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_instance_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstanceConfigsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstanceConfigsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_instance_configs_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_instance_admin.ListInstanceConfigsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.list_instance_configs(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-        )
-
-def test_list_instance_configs_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = InstanceAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_instance_configs in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc
-        request = {}
-        client.list_instance_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_instance_configs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_instance_configs in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_instance_configs] = mock_rpc
-
-        request = {}
-        await client.list_instance_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_instance_configs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigsRequest):
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_instance_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstanceConfigsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListInstanceConfigsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_async_from_dict():
-    await test_list_instance_configs_async(request_type=dict)
-
-def test_list_instance_configs_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstanceConfigsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
-        client.list_instance_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstanceConfigsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
-        await client.list_instance_configs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_instance_configs_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_instance_configs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_instance_configs_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_instance_configs(
-            spanner_instance_admin.ListInstanceConfigsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.ListInstanceConfigsResponse()
-
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_instance_configs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_instance_configs(
-            spanner_instance_admin.ListInstanceConfigsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instance_configs_pager(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_instance_configs(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, spanner_instance_admin.InstanceConfig)
-                   for i in results)
-def test_list_instance_configs_pages(transport_name: str = "grpc"):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_instance_configs(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_async_pager():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_instance_configs(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:  # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, spanner_instance_admin.InstanceConfig)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_instance_configs_async_pages():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_configs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='abc',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[],
-                next_page_token='def',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-                next_page_token='ghi',
-            ),
-            spanner_instance_admin.ListInstanceConfigsResponse(
-                instance_configs=[
-                    spanner_instance_admin.InstanceConfig(),
-                    spanner_instance_admin.InstanceConfig(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_instance_configs(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.GetInstanceConfigRequest,
-    dict,
-])
-def test_get_instance_config(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, - base_config='base_config_value', - etag='etag_value', - leader_options=['leader_options_value'], - reconciling=True, - state=spanner_instance_admin.InstanceConfig.State.CREATING, - free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, - quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, - storage_limit_per_processing_unit=3540, - ) - response = client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.GetInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.InstanceConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED - assert response.base_config == 'base_config_value' - assert response.etag == 'etag_value' - assert response.leader_options == ['leader_options_value'] - assert response.reconciling is True - assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING - assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE - assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION - assert response.storage_limit_per_processing_unit == 3540 - - -def test_get_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.GetInstanceConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
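# The equality assertion just below leans on proto-plus value semantics: two
# request messages compare equal when their fields match, so comparing against
# a freshly built GetInstanceConfigRequest verifies the client forwarded the
# caller's fields untouched (and is where an AIP-4235 auto-populated UUID4
# field would surface, if this request type had one). Sketch of the property
# being relied on:
#     assert (spanner_instance_admin.GetInstanceConfigRequest(name='n')
#             == spanner_instance_admin.GetInstanceConfigRequest(name='n'))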
- client.get_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceConfigRequest( - name='name_value', - ) - -def test_get_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc - request = {} - client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_instance_config] = mock_rpc - - request = {} - await client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
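# The async variants below cannot return a bare proto from the mocked stub:
# the client awaits the call, so grpc_helpers_async.FakeUnaryUnaryCall wraps
# the response in an awaitable. A rough stand-in for what such a fake must
# provide (a sketch, not the real helper):
#     class _FakeCall:
#         def __init__(self, response):
#             self._response = response
#         def __await__(self):
#             yield from ()          # no suspension points needed
#             return self._response  # value produced by `await call`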
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, - base_config='base_config_value', - etag='etag_value', - leader_options=['leader_options_value'], - reconciling=True, - state=spanner_instance_admin.InstanceConfig.State.CREATING, - free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, - quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, - storage_limit_per_processing_unit=3540, - )) - response = await client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.GetInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.InstanceConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED - assert response.base_config == 'base_config_value' - assert response.etag == 'etag_value' - assert response.leader_options == ['leader_options_value'] - assert response.reconciling is True - assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING - assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE - assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION - assert response.storage_limit_per_processing_unit == 3540 - - -@pytest.mark.asyncio -async def test_get_instance_config_async_from_dict(): - await test_get_instance_config_async(request_type=dict) - -def test_get_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value = spanner_instance_admin.InstanceConfig() - client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_instance_config_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request.
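# The field-header tests that follow assert that request fields appearing in
# the request URI are mirrored into gRPC metadata. The expected tuple is the
# one google.api_core.gapic_v1.routing_header builds, e.g.:
#     from google.api_core import gapic_v1
#     md = gapic_v1.routing_header.to_grpc_metadata((('name', 'name_value'),))
#     assert md == ('x-goog-request-params', 'name=name_value')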
- with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig()) - await client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_instance_config_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.InstanceConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_instance_config_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance_config( - spanner_instance_admin.GetInstanceConfigRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_instance_config_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_instance_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_instance_config_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
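# Mixing a request object with flattened keyword fields is rejected before
# any RPC is attempted; the generated clients guard this roughly as follows
# (a sketch of the pattern, not copied from the generated source):
#     has_flattened_params = any([name])
#     if request is not None and has_flattened_params:
#         raise ValueError('If the `request` argument is set, then none of '
#                          'the individual field arguments should be set.')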
- with pytest.raises(ValueError): - await client.get_instance_config( - spanner_instance_admin.GetInstanceConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.CreateInstanceConfigRequest, - dict, -]) -def test_create_instance_config(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.CreateInstanceConfigRequest( - parent='parent_value', - instance_config_id='instance_config_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstanceConfigRequest( - parent='parent_value', - instance_config_id='instance_config_id_value', - ) - -def test_create_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc - request = {} - client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
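# create_instance_config and update_instance_config are long-running
# operations: the first invocation also builds the transport's lazily cached
# operations client, whose methods pass through the same wrap_method helper.
# That is why the LRO variants of these tests call wrapper_fn.reset_mock()
# after the first RPC, so the later `wrapper_fn.call_count == 0` assertion
# measures only whether the tested method itself was re-wrapped.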
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_instance_config] = mock_rpc - - request = {} - await client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_config_async_from_dict(): - await test_create_instance_config_async(request_type=dict) - -def test_create_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = spanner_instance_admin.CreateInstanceConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_instance_config_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_instance_config_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_instance_config( - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].instance_config - mock_val = spanner_instance_admin.InstanceConfig(name='name_value') - assert arg == mock_val - arg = args[0].instance_config_id - mock_val = 'instance_config_id_value' - assert arg == mock_val - - -def test_create_instance_config_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_instance_config( - spanner_instance_admin.CreateInstanceConfigRequest(), - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - -@pytest.mark.asyncio -async def test_create_instance_config_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_instance_config( - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].instance_config - mock_val = spanner_instance_admin.InstanceConfig(name='name_value') - assert arg == mock_val - arg = args[0].instance_config_id - mock_val = 'instance_config_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_instance_config_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_instance_config( - spanner_instance_admin.CreateInstanceConfigRequest(), - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.UpdateInstanceConfigRequest, - dict, -]) -def test_update_instance_config(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
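# Unlike plain unary methods, create/update_instance_config hand back a
# long-running operation: the raw operations_pb2.Operation from the stub is
# wrapped into a google.api_core operation future, hence the future.Future
# isinstance check below. Resolving such a future in real code would look
# roughly like this (blocking sketch):
#     op = client.update_instance_config(request=request)
#     instance_config = op.result(timeout=300)  # waits for the LRO to finish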
- assert isinstance(response, future.Future) - - -def test_update_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.UpdateInstanceConfigRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstanceConfigRequest( - ) - -def test_update_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc - request = {} - client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance_config] = mock_rpc - - request = {} - await client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_config_async_from_dict(): - await test_update_instance_config_async(request_type=dict) - -def test_update_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceConfigRequest() - - request.instance_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance_config.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_config_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceConfigRequest() - - request.instance_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance_config.name=name_value', - ) in kw['metadata'] - - -def test_update_instance_config_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance_config( - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance_config - mock_val = spanner_instance_admin.InstanceConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_instance_config_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance_config( - spanner_instance_admin.UpdateInstanceConfigRequest(), - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_instance_config_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_instance_config( - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].instance_config - mock_val = spanner_instance_admin.InstanceConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_instance_config_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
- with pytest.raises(ValueError): - await client.update_instance_config( - spanner_instance_admin.UpdateInstanceConfigRequest(), - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.DeleteInstanceConfigRequest, - dict, -]) -def test_delete_instance_config(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_instance_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.DeleteInstanceConfigRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_instance_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstanceConfigRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_instance_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc - request = {} - client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
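# The two-call choreography below is the core of every
# *_use_cached_wrapped_rpc test: the swapped-in mock observes both
# invocations (call_count goes 1 then 2) while wrap_method is never invoked
# again (wrapper_fn.call_count stays 0), which proves the client reuses the
# wrapper cached at construction time instead of re-wrapping on each call.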
- assert mock_rpc.call_count == 1 - - client.delete_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_instance_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance_config] = mock_rpc - - request = {} - await client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_config_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceConfigRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstanceConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_instance_config_async_from_dict(): - await test_delete_instance_config_async(request_type=dict) - -def test_delete_instance_config_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - call.return_value = None - client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
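# Entries in mock_calls unpack as (name, args, kwargs) triples, so the
# `_, _, kw` pattern below reads the keyword arguments of the first stub
# invocation; the routing header must then appear among the metadata tuples:
#     name_, args_, kwargs_ = call.mock_calls[0]
#     assert ('x-goog-request-params', 'name=name_value') in kwargs_['metadata']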
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_instance_config_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_instance_config_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_instance_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_instance_config_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance_config( - spanner_instance_admin.DeleteInstanceConfigRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_instance_config_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_instance_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_instance_config_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
- with pytest.raises(ValueError): - await client.delete_instance_config( - spanner_instance_admin.DeleteInstanceConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstanceConfigOperationsRequest, - dict, -]) -def test_list_instance_config_operations(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstanceConfigOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_instance_config_operations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstanceConfigOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_instance_config_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstanceConfigOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_instance_config_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_config_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
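# _wrapped_methods is a plain dict on the transport, keyed by the bound stub
# callables themselves, roughly:
#     transport._wrapped_methods = {
#         transport.list_instance_config_operations: wrap_method(...),
#         ...
#     }
# so replacing one entry, as done below, is enough to intercept every
# subsequent client call to that RPC.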
- client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc - request = {} - client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_config_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_config_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_instance_config_operations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_instance_config_operations] = mock_rpc - - request = {} - await client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_instance_config_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_config_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstanceConfigOperationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigOperationsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_instance_config_operations_async_from_dict(): - await test_list_instance_config_operations_async(request_type=dict) - -def test_list_instance_config_operations_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value.
- request = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_instance_config_operations_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse()) - await client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_instance_config_operations_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instance_config_operations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_instance_config_operations_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_config_operations( - spanner_instance_admin.ListInstanceConfigOperationsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_instance_config_operations_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request.
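# Note the patch target used throughout: gRPC exposes each transport method
# as a multicallable object, so the tests patch __call__ on type(...) rather
# than on the attribute itself, and `client.transport.<rpc>(request, ...)`
# then routes through the mock.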
- with mock.patch.object(
- type(client.transport.list_instance_config_operations),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_instance_config_operations(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_config_operations_flattened_error_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_instance_config_operations(
- spanner_instance_admin.ListInstanceConfigOperationsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_instance_config_operations_pager(transport_name: str = "grpc"):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_config_operations),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_instance_config_operations(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, operations_pb2.Operation)
- for i in results)
-def test_list_instance_config_operations_pages(transport_name: str = "grpc"):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_config_operations),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instance_config_operations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instance_config_operations_async_pager(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_config_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instance_config_operations_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instance_config_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancesRequest, - dict, -]) -def test_list_instances(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstancesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_instances_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_instances(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_instances_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instances in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc - request = {} - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_instances in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc - - request = {} - await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancesRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_instances(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner_instance_admin.ListInstancesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListInstancesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_instances_async_from_dict():
- await test_list_instances_async(request_type=dict)
-
-def test_list_instances_field_headers():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner_instance_admin.ListInstancesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instances),
- '__call__') as call:
- call.return_value = spanner_instance_admin.ListInstancesResponse()
- client.list_instances(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_instances_field_headers_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner_instance_admin.ListInstancesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instances),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
- await client.list_instances(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_instances_flattened():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instances),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner_instance_admin.ListInstancesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_instances(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_instances_flattened_error():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_instances(
- spanner_instance_admin.ListInstancesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_instances_flattened_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instances),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_instances(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instances_flattened_error_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_instances(
- spanner_instance_admin.ListInstancesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_instances_pager(transport_name: str = "grpc"):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instances),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_instances(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_instance_admin.Instance) - for i in results) -def test_list_instances_pages(transport_name: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instances_async_pager(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instances(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_instance_admin.Instance) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instances_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancesResponse( - instances=[ - spanner_instance_admin.Instance(), - spanner_instance_admin.Instance(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancePartitionsRequest, - dict, -]) -def test_list_instance_partitions(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.ListInstancePartitionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancePartitionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_instance_partitions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.ListInstancePartitionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_instance_partitions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancePartitionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_instance_partitions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_partitions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc - request = {} - client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_instance_partitions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_instance_partitions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_instance_partitions] = mock_rpc
-
- request = {}
- await client.list_instance_partitions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_instance_partitions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionsRequest):
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partitions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_instance_partitions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner_instance_admin.ListInstancePartitionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListInstancePartitionsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_async_from_dict():
- await test_list_instance_partitions_async(request_type=dict)
-
-def test_list_instance_partitions_field_headers():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = spanner_instance_admin.ListInstancePartitionsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() - client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_instance_partitions_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.ListInstancePartitionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse()) - await client.list_instance_partitions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_instance_partitions_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instance_partitions( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_instance_partitions_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_partitions( - spanner_instance_admin.ListInstancePartitionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_instance_partitions_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_instance_partitions(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_partitions_flattened_error_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_instance_partitions(
- spanner_instance_admin.ListInstancePartitionsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_instance_partitions_pager(transport_name: str = "grpc"):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partitions),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_instance_partitions(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, spanner_instance_admin.InstancePartition)
- for i in results)
-def test_list_instance_partitions_pages(transport_name: str = "grpc"):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partitions),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instance_partitions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instance_partitions_async_pager(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_partitions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, spanner_instance_admin.InstancePartition) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instance_partitions_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionsResponse( - instance_partitions=[ - spanner_instance_admin.InstancePartition(), - spanner_instance_admin.InstancePartition(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instance_partitions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstanceRequest, - dict, -]) -def test_get_instance(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.Instance( - name='name_value', - config='config_value', - display_name='display_name_value', - node_count=1070, - processing_units=1743, - state=spanner_instance_admin.Instance.State.CREATING, - instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, - endpoint_uris=['endpoint_uris_value'], - edition=spanner_instance_admin.Instance.Edition.STANDARD, - default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, - ) - response = client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.GetInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_instance_admin.Instance) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.node_count == 1070 - assert response.processing_units == 1743 - assert response.state == spanner_instance_admin.Instance.State.CREATING - assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED - assert response.endpoint_uris == ['endpoint_uris_value'] - assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD - assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE - - -def test_get_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.GetInstanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.GetInstanceRequest( - name='name_value', - ) - -def test_get_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc - request = {} - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_instance(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_instance in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc
-
- request = {}
- await client.get_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_instance(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstanceRequest):
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance(
- name='name_value',
- config='config_value',
- display_name='display_name_value',
- node_count=1070,
- processing_units=1743,
- state=spanner_instance_admin.Instance.State.CREATING,
- instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED,
- endpoint_uris=['endpoint_uris_value'],
- edition=spanner_instance_admin.Instance.Edition.STANDARD,
- default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE,
- ))
- response = await client.get_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = spanner_instance_admin.GetInstanceRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, spanner_instance_admin.Instance) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.node_count == 1070 - assert response.processing_units == 1743 - assert response.state == spanner_instance_admin.Instance.State.CREATING - assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED - assert response.endpoint_uris == ['endpoint_uris_value'] - assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD - assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE - - -@pytest.mark.asyncio -async def test_get_instance_async_from_dict(): - await test_get_instance_async(request_type=dict) - -def test_get_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = spanner_instance_admin.Instance() - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance()) - await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.Instance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_instance_flattened_error():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_instance(
- spanner_instance_admin.GetInstanceRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_instance_flattened_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_instance_flattened_error_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_instance(
- spanner_instance_admin.GetInstanceRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_instance_admin.CreateInstanceRequest,
- dict,
-])
-def test_create_instance(request_type, transport: str = 'grpc'):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.create_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = spanner_instance_admin.CreateInstanceRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_create_instance_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = spanner_instance_admin.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', - ) - -def test_create_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc - request = {} - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc - - request = {} - await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_async_from_dict(): - await test_create_instance_async(request_type=dict) - -def test_create_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstanceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
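- # (Routing fields travel in the x-goog-request-params metadata entry,
- # e.g. ('x-goog-request-params', 'parent=parent_value'); nested fields
- # appear in dotted form, e.g. 'instance.name=...'. This lets the backend
- # route the request without parsing the payload.)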
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].instance_id - mock_val = 'instance_id_value' - assert arg == mock_val - arg = args[0].instance - mock_val = spanner_instance_admin.Instance(name='name_value') - assert arg == mock_val - - -def test_create_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_instance( - spanner_instance_admin.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].instance_id - mock_val = 'instance_id_value' - assert arg == mock_val - arg = args[0].instance - mock_val = spanner_instance_admin.Instance(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_instance_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
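- # (The request object and the flattened keyword arguments describe the
- # same message, so passing both would be ambiguous; the client refuses to
- # guess and raises ValueError instead.)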
- with pytest.raises(ValueError): - await client.create_instance( - spanner_instance_admin.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.UpdateInstanceRequest, - dict, -]) -def test_update_instance(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.UpdateInstanceRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.UpdateInstanceRequest( - ) - -def test_update_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc - request = {} - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc - - request = {} - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_async_from_dict(): - await test_update_instance_async(request_type=dict) - -def test_update_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceRequest() - - request.instance.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
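- # (Patching __call__ on the type of the bound gRPC method intercepts the
- # stub invocation itself, so no real channel or network is involved.)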
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstanceRequest() - - request.instance.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] - - -def test_update_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance( - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = spanner_instance_admin.Instance(name='name_value') - assert arg == mock_val - arg = args[0].field_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance( - spanner_instance_admin.UpdateInstanceRequest(), - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
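- # (The async client expects the stub to return an awaitable, so the test
- # wraps the response message in grpc_helpers_async.FakeUnaryUnaryCall;
- # the plain assignment just below is immediately superseded by the
- # wrapped one.)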
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_instance( - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = spanner_instance_admin.Instance(name='name_value') - assert arg == mock_val - arg = args[0].field_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_instance_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_instance( - spanner_instance_admin.UpdateInstanceRequest(), - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.DeleteInstanceRequest, - dict, -]) -def test_delete_instance(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.DeleteInstanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
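- # (Giving the mock's return value a string .name keeps any code path
- # that inspects an operation name happy; only the captured request
- # matters for the assertion that follows.)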
- client.delete_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstanceRequest( - name='name_value', - ) - -def test_delete_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc - request = {} - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc - - request = {} - await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. 
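- # (A truthy check on mock_calls is enough here to prove the stub ran;
- # the sync variants assert an exact count of 1. DeleteInstance returns
- # google.protobuf.Empty, which surfaces as None below.)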
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_instance_async_from_dict(): - await test_delete_instance_async(request_type=dict) - -def test_delete_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = None - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_instance_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_instance_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_instance( - spanner_instance_admin.DeleteInstanceRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_instance_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_instance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_instance_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_instance( - spanner_instance_admin.DeleteInstanceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.set_iam_policy(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
- resource='resource_value',
- )
-
-def test_set_iam_policy_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.set_iam_policy in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
- request = {}
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
-
- request = {}
- await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
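- # (The IAM methods use the raw protobuf types from google.iam.v1
- # (iam_policy_pb2, policy_pb2) rather than proto-plus wrappers, so the
- # response is checked field by field below.)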
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
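- # (Each keyword below is copied onto the matching field of a freshly
- # constructed SetIamPolicyRequest before the RPC is issued, which is
- # exactly what the argument inspection that follows verifies.)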
- client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_set_iam_policy_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc - - request = {} - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
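- # (mock.AsyncMock counts invocations just like mock.Mock, so call_count
- # is enough to show the cached wrapper dispatched to our replacement.)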
- assert mock_rpc.call_count == 1
-
- await client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.GetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
- await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = policy_pb2.Policy()
- client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_get_iam_policy_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
-
- request = {}
- await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- ))
- response = await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
- await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_test_iam_permissions_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.test_iam_permissions( - resource='resource_value', - permissions=['permissions_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - arg = args[0].permissions - mock_val = ['permissions_value'] - assert arg == mock_val - - -def test_test_iam_permissions_flattened_error(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.test_iam_permissions(
-            iam_policy_pb2.TestIamPermissionsRequest(),
-            resource='resource_value',
-            permissions=['permissions_value'],
-        )
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.test_iam_permissions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.test_iam_permissions(
-            resource='resource_value',
-            permissions=['permissions_value'],
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-        arg = args[0].permissions
-        mock_val = ['permissions_value']
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.test_iam_permissions(
-            iam_policy_pb2.TestIamPermissionsRequest(),
-            resource='resource_value',
-            permissions=['permissions_value'],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.GetInstancePartitionRequest,
-    dict,
-])
-def test_get_instance_partition(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.InstancePartition(
-            name='name_value',
-            config='config_value',
-            display_name='display_name_value',
-            state=spanner_instance_admin.InstancePartition.State.CREATING,
-            referencing_databases=['referencing_databases_value'],
-            referencing_backups=['referencing_backups_value'],
-            etag='etag_value',
-            node_count=1070,
-        )
-        response = client.get_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.GetInstancePartitionRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-    assert isinstance(response, spanner_instance_admin.InstancePartition)
-    assert response.name == 'name_value'
-    assert response.config == 'config_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == spanner_instance_admin.InstancePartition.State.CREATING
-    assert response.referencing_databases == ['referencing_databases_value']
-    assert response.referencing_backups == ['referencing_backups_value']
-    assert response.etag == 'etag_value'
-
-
-def test_get_instance_partition_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_instance_admin.GetInstancePartitionRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_partition),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_instance_partition(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner_instance_admin.GetInstancePartitionRequest(
-            name='name_value',
-        )
-
-def test_get_instance_partition_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = InstanceAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_instance_partition in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc
-        request = {}
-        client.get_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
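-        # (mock_rpc stands in for the wrapped method cached in
-        # _wrapped_methods at client construction; if a wrapper were rebuilt
-        # per call, wrapper_fn.call_count would grow again below.)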
-        assert mock_rpc.call_count == 1
-
-        client.get_instance_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_instance_partition in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_instance_partition] = mock_rpc
-
-        request = {}
-        await client.get_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_instance_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.GetInstancePartitionRequest):
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition(
-            name='name_value',
-            config='config_value',
-            display_name='display_name_value',
-            state=spanner_instance_admin.InstancePartition.State.CREATING,
-            referencing_databases=['referencing_databases_value'],
-            referencing_backups=['referencing_backups_value'],
-            etag='etag_value',
-        ))
-        response = await client.get_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.GetInstancePartitionRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
- assert isinstance(response, spanner_instance_admin.InstancePartition) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.state == spanner_instance_admin.InstancePartition.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.referencing_backups == ['referencing_backups_value'] - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_instance_partition_async_from_dict(): - await test_get_instance_partition_async(request_type=dict) - -def test_get_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - call.return_value = spanner_instance_admin.InstancePartition() - client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.GetInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition()) - await client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_instance_partition_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = spanner_instance_admin.InstancePartition() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_instance_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].name
-    mock_val = 'name_value'
-    assert arg == mock_val
-
-
-def test_get_instance_partition_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_instance_partition(
-            spanner_instance_admin.GetInstancePartitionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_instance_partition_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_instance_partition(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_instance_partition_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_instance_partition(
-            spanner_instance_admin.GetInstancePartitionRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.CreateInstancePartitionRequest,
-    dict,
-])
-def test_create_instance_partition(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.CreateInstancePartitionRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_create_instance_partition_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
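-    # (Auto-population means the GAPIC layer fills AIP-4235-style request-ID
-    # string fields with a fresh UUID4 when they are left unset, so a
-    # hypothetical request_id field would reach the stub as something like
-    # 'f47ac10b-58cc-...'; fields set explicitly below must pass through
-    # unchanged.)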
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_instance_admin.CreateInstancePartitionRequest(
-        parent='parent_value',
-        instance_partition_id='instance_partition_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance_partition),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.create_instance_partition(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner_instance_admin.CreateInstancePartitionRequest(
-            parent='parent_value',
-            instance_partition_id='instance_partition_id_value',
-        )
-
-def test_create_instance_partition_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = InstanceAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_instance_partition in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc
-        request = {}
-        client.create_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.create_instance_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_instance_partition in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_instance_partition] = mock_rpc
-
-        request = {}
-        await client.create_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.CreateInstancePartitionRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.CreateInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_instance_partition_async_from_dict(): - await test_create_instance_partition_async(request_type=dict) - -def test_create_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstancePartitionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.CreateInstancePartitionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. 
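-        # (grpc_helpers_async.FakeUnaryUnaryCall wraps the designated
-        # response in an awaitable that mimics a grpc.aio unary-unary call,
-        # which is what lets the awaited client call above resolve without a
-        # real channel.)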
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_create_instance_partition_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_instance_partition(
-            parent='parent_value',
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            instance_partition_id='instance_partition_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].instance_partition
-        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
-        assert arg == mock_val
-        arg = args[0].instance_partition_id
-        mock_val = 'instance_partition_id_value'
-        assert arg == mock_val
-
-
-def test_create_instance_partition_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_instance_partition(
-            spanner_instance_admin.CreateInstancePartitionRequest(),
-            parent='parent_value',
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            instance_partition_id='instance_partition_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_instance_partition_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_instance_partition(
-            parent='parent_value',
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            instance_partition_id='instance_partition_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].instance_partition
-        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
-        assert arg == mock_val
-        arg = args[0].instance_partition_id
-        mock_val = 'instance_partition_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_instance_partition_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_instance_partition(
-            spanner_instance_admin.CreateInstancePartitionRequest(),
-            parent='parent_value',
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            instance_partition_id='instance_partition_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.DeleteInstancePartitionRequest,
-    dict,
-])
-def test_delete_instance_partition(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.DeleteInstancePartitionRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert response is None
-
-
-def test_delete_instance_partition_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_instance_admin.DeleteInstancePartitionRequest(
-        name='name_value',
-        etag='etag_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance_partition),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_instance_partition(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.DeleteInstancePartitionRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_instance_partition_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc - request = {} - client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_instance_partition in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance_partition] = mock_rpc - - request = {} - await client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.DeleteInstancePartitionRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.DeleteInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_instance_partition_async_from_dict(): - await test_delete_instance_partition_async(request_type=dict) - -def test_delete_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - call.return_value = None - client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.DeleteInstancePartitionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_instance_partition_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_instance_partition( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
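-        # (the flattened kwargs are copied onto a freshly constructed
-        # DeleteInstancePartitionRequest, so the assertions below read them
-        # back from the request object handed to the stub as args[0].)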
-    assert len(call.mock_calls) == 1
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].name
-    mock_val = 'name_value'
-    assert arg == mock_val
-
-
-def test_delete_instance_partition_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_instance_partition(
-            spanner_instance_admin.DeleteInstancePartitionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_instance_partition_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_instance_partition(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_instance_partition_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_instance_partition(
-            spanner_instance_admin.DeleteInstancePartitionRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.UpdateInstancePartitionRequest,
-    dict,
-])
-def test_update_instance_partition(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.update_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.UpdateInstancePartitionRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_update_instance_partition_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_instance_admin.UpdateInstancePartitionRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_instance_partition),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.update_instance_partition(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == spanner_instance_admin.UpdateInstancePartitionRequest()
-
-def test_update_instance_partition_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = InstanceAdminClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_instance_partition in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc
-        request = {}
-        client.update_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_instance_partition(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_instance_partition_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = InstanceAdminAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_instance_partition in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_instance_partition] = mock_rpc
-
-        request = {}
-        await client.update_instance_partition(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_partition_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.UpdateInstancePartitionRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.UpdateInstancePartitionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_partition_async_from_dict(): - await test_update_instance_partition_async(request_type=dict) - -def test_update_instance_partition_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstancePartitionRequest() - - request.instance_partition.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance_partition.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_partition_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.UpdateInstancePartitionRequest() - - request.instance_partition.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. 
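-        # (the x-goog-request-params entry asserted below is derived from
-        # the request field path; e.g. gapic_v1.routing_header
-        # .to_grpc_metadata([('instance_partition.name', 'name_value')])
-        # produces exactly the metadata pair checked here.)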
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'instance_partition.name=name_value',
-        ) in kw['metadata']
-
-
-def test_update_instance_partition_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_instance_partition(
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].instance_partition
-        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
-        assert arg == mock_val
-        arg = args[0].field_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_instance_partition_flattened_error():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_instance_partition(
-            spanner_instance_admin.UpdateInstancePartitionRequest(),
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_instance_partition_flattened_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_instance_partition),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_instance_partition(
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].instance_partition
-        mock_val = spanner_instance_admin.InstancePartition(name='name_value')
-        assert arg == mock_val
-        arg = args[0].field_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_instance_partition_flattened_error_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_instance_partition(
-            spanner_instance_admin.UpdateInstancePartitionRequest(),
-            instance_partition=spanner_instance_admin.InstancePartition(name='name_value'),
-            field_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    spanner_instance_admin.ListInstancePartitionOperationsRequest,
-    dict,
-])
-def test_list_instance_partition_operations(request_type, transport: str = 'grpc'):
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
-        )
-        response = client.list_instance_partition_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListInstancePartitionOperationsPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
-
-
-def test_list_instance_partition_operations_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = spanner_instance_admin.ListInstancePartitionOperationsRequest(
-        parent='parent_value',
-        filter='filter_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_instance_partition_operations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.ListInstancePartitionOperationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_instance_partition_operations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc - request = {} - client.list_instance_partition_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_partition_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_partition_operations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_instance_partition_operations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_instance_partition_operations] = mock_rpc - - request = {} - await client.list_instance_partition_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_instance_partition_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instance_partition_operations_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
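-    # (a successful call returns a pager rather than the raw response; the
-    # async variant would typically be consumed as
-    #
-    #     async for operation in await client.list_instance_partition_operations(request):
-    #         ...
-    #
-    # with further pages fetched lazily as iteration proceeds.)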
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse(
-            next_page_token='next_page_token_value',
-            unreachable_instance_partitions=['unreachable_instance_partitions_value'],
-        ))
-        response = await client.list_instance_partition_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListInstancePartitionOperationsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_async_from_dict():
-    await test_list_instance_partition_operations_async(request_type=dict)
-
-def test_list_instance_partition_operations_field_headers():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
-        client.list_instance_partition_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_field_headers_async():
-    client = InstanceAdminAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = spanner_instance_admin.ListInstancePartitionOperationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_instance_partition_operations),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
-        await client.list_instance_partition_operations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_instance_partition_operations_flattened():
-    client = InstanceAdminClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partition_operations),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_instance_partition_operations(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_instance_partition_operations_flattened_error():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_instance_partition_operations(
- spanner_instance_admin.ListInstancePartitionOperationsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_flattened_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partition_operations),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_instance_partition_operations(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_flattened_error_async():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_instance_partition_operations(
- spanner_instance_admin.ListInstancePartitionOperationsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_instance_partition_operations_pager(transport_name: str = "grpc"):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partition_operations),
- '__call__') as call:
- # Set the response to a series of pages.
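- # mock's side_effect hands back one response per underlying RPC, so the
- # pager issues a fresh request each time iteration crosses a page with a
- # non-empty next_page_token; the trailing RuntimeError guards against
- # the pager fetching more pages than the test supplies.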
- call.side_effect = (
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_instance_partition_operations(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, operations_pb2.Operation)
- for i in results)
-
-
-def test_list_instance_partition_operations_pages(transport_name: str = "grpc"):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partition_operations),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_instance_partition_operations(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_instance_partition_operations_async_pager():
- client = InstanceAdminAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_instance_partition_operations),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = ( - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instance_partition_operations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, operations_pb2.Operation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instance_partition_operations_async_pages(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstancePartitionOperationsResponse( - operations=[ - operations_pb2.Operation(), - operations_pb2.Operation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instance_partition_operations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.MoveInstanceRequest, - dict, -]) -def test_move_instance(request_type, transport: str = 'grpc'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.MoveInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_move_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = spanner_instance_admin.MoveInstanceRequest( - name='name_value', - target_config='target_config_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.move_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == spanner_instance_admin.MoveInstanceRequest( - name='name_value', - target_config='target_config_value', - ) - -def test_move_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.move_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc - request = {} - client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.move_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_move_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.move_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.move_instance] = mock_rpc - - request = {} - await client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.move_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_move_instance_async(transport: str = 'grpc_asyncio', request_type=spanner_instance_admin.MoveInstanceRequest): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = spanner_instance_admin.MoveInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_move_instance_async_from_dict(): - await test_move_instance_async(request_type=dict) - -def test_move_instance_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.MoveInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
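- # Request fields that appear in the URI are mirrored into the
- # x-goog-request-params metadata entry so the backend can route the call;
- # the assertion below checks for a tuple of (roughly) this shape:
- #
- #     ('x-goog-request-params', 'name=name_value')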
- with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_move_instance_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = spanner_instance_admin.MoveInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.move_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_list_instance_configs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_configs] = mock_rpc - - request = {} - client.list_instance_configs(request) - - # Establish that the underlying gRPC stub method was called. 
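- # (The client dispatches through whatever callable is stored in
- # _wrapped_methods, roughly:
- #     rpc = self._transport._wrapped_methods[self._transport.list_instance_configs]
- #     return rpc(request, ...)
- # so swapping in mock_rpc above lets the counts below observe each call.)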
- assert mock_rpc.call_count == 1 - - client.list_instance_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_configs_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
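- # transcode() normally maps the request onto one of the method's
- # http_options rules; the stub below mimics its return shape, which as
- # used in these tests carries 'uri', 'method', and 'query_params' keys
- # (plus 'body' for methods that send a payload).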
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instance_configs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instance_configs_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instance_configs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instance_configs_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instance_configs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1]) - - -def test_list_instance_configs_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_configs( - spanner_instance_admin.ListInstanceConfigsRequest(), - parent='parent_value', - ) - - -def test_list_instance_configs_rest_pager(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='abc', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[], - next_page_token='def', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - ], - next_page_token='ghi', - ), - spanner_instance_admin.ListInstanceConfigsResponse( - instance_configs=[ - spanner_instance_admin.InstanceConfig(), - spanner_instance_admin.InstanceConfig(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(spanner_instance_admin.ListInstanceConfigsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_instance_configs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, spanner_instance_admin.InstanceConfig) - for i in results) - - pages = list(client.list_instance_configs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_config] = mock_rpc - - request = {} - client.get_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_instance_config_rest_required_fields(request_type=spanner_instance_admin.GetInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstanceConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
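- # The REST flattened tests patch the transport's requests Session and
- # then check that the final URI matches the method's path template, via
- # an assertion of (roughly) this shape:
- #
- #     path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % host, uri)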
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstanceConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) - - -def test_get_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance_config( - spanner_instance_admin.GetInstanceConfigRequest(), - name='name_value', - ) - - -def test_create_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance_config] = mock_rpc - - request = {} - client.create_instance_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_instance_config_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_config_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceConfigId"] = 'instance_config_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "instanceConfigId" in jsonified_request - assert jsonified_request["instanceConfigId"] == 'instance_config_id_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
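- # Unlike the GET methods above, CreateInstanceConfig transcodes to a
- # POST, so the stubbed transcode result below also carries the request
- # message as its 'body'.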
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "instanceConfigId", "instanceConfig", ))) - - -def test_create_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigs" % client.transport._host, args[1]) - - -def test_create_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_instance_config( - spanner_instance_admin.CreateInstanceConfigRequest(), - parent='parent_value', - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - instance_config_id='instance_config_id_value', - ) - - -def test_update_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance_config] = mock_rpc - - request = {} - client.update_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_instance_config_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("instanceConfig", "updateMask", ))) - - -def test_update_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}} - - # get truthy value for each flattened field - mock_args = dict( - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance_config.name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) - - -def test_update_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_instance_config( - spanner_instance_admin.UpdateInstanceConfigRequest(), - instance_config=spanner_instance_admin.InstanceConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_instance_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_config] = mock_rpc - - request = {} - client.delete_instance_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_instance_config_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceConfigRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
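- # DeleteInstanceConfig returns google.protobuf.Empty, so the faked HTTP
- # response below carries an empty string as its body.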
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_instance_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_instance_config_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_instance_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", "validateOnly", )) & set(("name", ))) - - -def test_delete_instance_config_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instanceConfigs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_instance_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instanceConfigs/*}" % client.transport._host, args[1]) - - -def test_delete_instance_config_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance_config( - spanner_instance_admin.DeleteInstanceConfigRequest(), - name='name_value', - ) - - -def test_list_instance_config_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_config_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.list_instance_config_operations] = mock_rpc - - request = {} - client.list_instance_config_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_config_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_config_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_config_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instance_config_operations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instance_config_operations_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instance_config_operations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instance_config_operations_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instance_config_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instanceConfigOperations" % client.transport._host, args[1]) - - -def test_list_instance_config_operations_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_config_operations( - spanner_instance_admin.ListInstanceConfigOperationsRequest(), - parent='parent_value', - ) - - -def test_list_instance_config_operations_rest_pager(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
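- # The REST pager test serializes each fake page to JSON, wraps it in a
- # requests Response, and queues the responses on req.side_effect, one
- # per HTTP round trip, roughly:
- #
- #     req.side_effect = [resp_page_1, resp_page_2, ...]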
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstanceConfigOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1'}
-
- pager = client.list_instance_config_operations(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, operations_pb2.Operation)
- for i in results)
-
- pages = list(client.list_instance_config_operations(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_list_instances_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_instances in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc
-
- request = {}
- client.list_instances(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instances_rest_required_fields(request_type=spanner_instance_admin.ListInstancesRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "instance_deadline", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_instances(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_instances_rest_unset_required_fields():
- transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.list_instances._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "instanceDeadline", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_instances_rest_flattened():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = spanner_instance_admin.ListInstancesResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_instances(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1])
-
-
-def test_list_instances_rest_flattened_error(transport: str = 'rest'):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_instances(
- spanner_instance_admin.ListInstancesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_instances_rest_pager(transport: str = 'rest'):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_instance_admin.ListInstancesResponse(
- instances=[
- spanner_instance_admin.Instance(),
- spanner_instance_admin.Instance(),
- spanner_instance_admin.Instance(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancesResponse(
- instances=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancesResponse(
- instances=[
- spanner_instance_admin.Instance(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancesResponse(
- instances=[
- spanner_instance_admin.Instance(),
- spanner_instance_admin.Instance(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_instance_admin.ListInstancesResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1'}
-
- pager = client.list_instances(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, spanner_instance_admin.Instance)
- for i in results)
-
- pages = list(client.list_instances(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_list_instance_partitions_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_instance_partitions in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_instance_partitions] = mock_rpc
-
- request = {}
- client.list_instance_partitions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.list_instance_partitions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_partitions_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partitions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("instance_partition_deadline", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instance_partitions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instance_partitions_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instance_partitions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instance_partitions_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instance_partitions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1]) - - -def test_list_instance_partitions_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instance_partitions( - spanner_instance_admin.ListInstancePartitionsRequest(), - parent='parent_value', - ) - - -def test_list_instance_partitions_rest_pager(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
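- # The tokens 'abc', 'def', 'ghi' chain the canned pages below; the final
- # page's empty next_page_token ends iteration.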
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancePartitionsResponse(
- instance_partitions=[
- spanner_instance_admin.InstancePartition(),
- spanner_instance_admin.InstancePartition(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_instance_admin.ListInstancePartitionsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
- pager = client.list_instance_partitions(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, spanner_instance_admin.InstancePartition)
- for i in results)
-
- pages = list(client.list_instance_partitions(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_get_instance_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_instance in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc
-
- request = {}
- client.get_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.get_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_instance_rest_required_fields(request_type=spanner_instance_admin.GetInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("field_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.Instance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("fieldMask", )) & set(("name", ))) - - -def test_get_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
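- # Patching 'request' on the session's type fakes only the HTTP layer; the
- # real REST transport still builds and transcodes the request.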
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.Instance() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1]) - - -def test_get_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance( - spanner_instance_admin.GetInstanceRequest(), - name='name_value', - ) - - -def test_create_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc - - request = {} - client.create_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_instance_rest_required_fields(request_type=spanner_instance_admin.CreateInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
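- # Unlike the GET methods above, this POST method carries a request body,
- # so the mocked transcode result is given one below.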
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "instanceId", "instance", ))) - - -def test_create_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*}/instances" % client.transport._host, args[1]) - - -def test_create_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
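- # The ValueError is raised client-side, before any HTTP request is made.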
- with pytest.raises(ValueError): - client.create_instance( - spanner_instance_admin.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=spanner_instance_admin.Instance(name='name_value'), - ) - - -def test_update_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc - - request = {} - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_instance_rest_required_fields(request_type=spanner_instance_admin.UpdateInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("instance", "fieldMask", ))) - - -def test_update_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/instances/sample2'}} - - # get truthy value for each flattened field - mock_args = dict( - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance.name=projects/*/instances/*}" % client.transport._host, args[1]) - - -def test_update_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_instance( - spanner_instance_admin.UpdateInstanceRequest(), - instance=spanner_instance_admin.Instance(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc - - request = {} - client.delete_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_instance_rest_required_fields(request_type=spanner_instance_admin.DeleteInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_instance_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*}" % client.transport._host, args[1]) - - -def test_delete_instance_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_instance( - spanner_instance_admin.DeleteInstanceRequest(), - name='name_value', - ) - - -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
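- # Overwriting the cached entry routes subsequent calls to the mock without
- # triggering a re-wrap.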
- client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_set_iam_policy_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) - - -def test_set_iam_policy_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.set_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:setIamPolicy" % client.transport._host, args[1]) - - -def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
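- # (On the REST transport, the "stub" mentioned above is the cached wrapped
- # REST method rather than a gRPC stub.)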
- assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_iam_policy_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", ))) - - -def test_get_iam_policy_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:getIamPolicy" % client.transport._host, args[1]) - - -def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - jsonified_request["permissions"] = 'permissions_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == 'permissions_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
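- # iam_policy_pb2 requests are already raw protobuf messages, so no
- # request_type.pb() conversion is needed before transcoding.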
- pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_test_iam_permissions_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) - - -def test_test_iam_permissions_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - permissions=['permissions_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.test_iam_permissions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/instances/*}:testIamPermissions" % client.transport._host, args[1]) - - -def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
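- # A caller may pass either a fully-formed request message or individual
- # flattened fields, never both in the same call.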
- with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_get_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance_partition] = mock_rpc - - request = {} - client.get_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_instance_partition_rest_required_fields(request_type=spanner_instance_admin.GetInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstancePartition() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
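- # GetInstancePartition uses proto-plus messages, so both the request and
- # the faked response are converted to their underlying protobuf types
- # before JSON serialization.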
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstancePartition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_instance_partition(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_instance_partition_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_instance_partition._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_instance_partition_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstancePartition() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstancePartition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_instance_partition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) - - -def test_get_instance_partition_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_instance_partition( - spanner_instance_admin.GetInstancePartitionRequest(), - name='name_value', - ) - - -def test_create_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_instance_partition] = mock_rpc - - request = {} - client.create_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_instance_partition_rest_required_fields(request_type=spanner_instance_admin.CreateInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["instance_partition_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["instancePartitionId"] = 'instance_partition_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "instancePartitionId" in jsonified_request - assert jsonified_request["instancePartitionId"] == 'instance_partition_id_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
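- # Unlike the GET case above, this POST method also carries the request
- # message as the HTTP body, so the stub below adds a 'body' entry to the
- # transcode result.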
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_instance_partition(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_instance_partition_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_instance_partition._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "instancePartitionId", "instancePartition", ))) - - -def test_create_instance_partition_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - instance_partition_id='instance_partition_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_instance_partition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitions" % client.transport._host, args[1]) - - -def test_create_instance_partition_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_instance_partition( - spanner_instance_admin.CreateInstancePartitionRequest(), - parent='parent_value', - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - instance_partition_id='instance_partition_id_value', - ) - - -def test_delete_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_instance_partition] = mock_rpc - - request = {} - client.delete_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_instance_partition_rest_required_fields(request_type=spanner_instance_admin.DeleteInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance_partition._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
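- # DeleteInstancePartition has no response payload, so the faked HTTP body
- # below is an empty string rather than serialized JSON.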
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_instance_partition(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_instance_partition_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_instance_partition._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) - - -def test_delete_instance_partition_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_instance_partition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) - - -def test_delete_instance_partition_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_instance_partition( - spanner_instance_admin.DeleteInstancePartitionRequest(), - name='name_value', - ) - - -def test_update_instance_partition_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance_partition in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance_partition] = mock_rpc - - request = {} - client.update_instance_partition(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance_partition(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_instance_partition_rest_required_fields(request_type=spanner_instance_admin.UpdateInstancePartitionRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance_partition._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
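- # UpdateInstancePartition has no simple required string fields to seed,
- # so request_init above stays empty and the call maps to an HTTP PATCH
- # whose body carries the request message.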
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_instance_partition(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_instance_partition_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_instance_partition._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("instancePartition", "fieldMask", ))) - - -def test_update_instance_partition_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_instance_partition(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{instance_partition.name=projects/*/instances/*/instancePartitions/*}" % client.transport._host, args[1]) - - -def test_update_instance_partition_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_instance_partition( - spanner_instance_admin.UpdateInstancePartitionRequest(), - instance_partition=spanner_instance_admin.InstancePartition(name='name_value'), - field_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_list_instance_partition_operations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instance_partition_operations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instance_partition_operations] = mock_rpc - - request = {} - client.list_instance_partition_operations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instance_partition_operations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instance_partition_operations_rest_required_fields(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instance_partition_operations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "instance_partition_deadline", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
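- # The optional filter and paging fields travel as query parameters, which
- # is why the unset-required-fields check above tolerates them.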
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instance_partition_operations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instance_partition_operations_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instance_partition_operations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "instancePartitionDeadline", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instance_partition_operations_rest_flattened(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/instances/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instance_partition_operations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/instances/*}/instancePartitionOperations" % client.transport._host, args[1]) - - -def test_list_instance_partition_operations_rest_flattened_error(transport: str = 'rest'): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_instance_partition_operations(
- spanner_instance_admin.ListInstancePartitionOperationsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_instance_partition_operations_rest_pager(transport: str = 'rest'):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- next_page_token='abc',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[],
- next_page_token='def',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- ],
- next_page_token='ghi',
- ),
- spanner_instance_admin.ListInstancePartitionOperationsResponse(
- operations=[
- operations_pb2.Operation(),
- operations_pb2.Operation(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/instances/sample2'}
-
- pager = client.list_instance_partition_operations(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, operations_pb2.Operation)
- for i in results)
-
- pages = list(client.list_instance_partition_operations(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_move_instance_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.move_instance in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc
-
- request = {}
- client.move_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.move_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_move_instance_rest_required_fields(request_type=spanner_instance_admin.MoveInstanceRequest): - transport_class = transports.InstanceAdminRestTransport - - request_init = {} - request_init["name"] = "" - request_init["target_config"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["targetConfig"] = 'target_config_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "targetConfig" in jsonified_request - assert jsonified_request["targetConfig"] == 'target_config_value' - - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
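- # MoveInstance is a long-running method, so the faked REST response below
- # is a serialized longrunning Operation rather than a final resource.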
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.move_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_move_instance_rest_unset_required_fields(): - transport = transports.InstanceAdminRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.move_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "targetConfig", ))) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = InstanceAdminClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = InstanceAdminClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = InstanceAdminClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = InstanceAdminClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = InstanceAdminClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
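- # Both the sync and asyncio gRPC transports expose their underlying
- # channel via the grpc_channel property.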
- transport = transports.InstanceAdminGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.InstanceAdminGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - transports.InstanceAdminRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = InstanceAdminClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_configs_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - client.list_instance_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - call.return_value = spanner_instance_admin.InstanceConfig() - client.get_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_config(request=None) - - # Establish that the underlying stub method was called. 
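- # request=None must still be normalized into a default-constructed
- # request message before it reaches the stub.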
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_config_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - call.return_value = None - client.delete_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_config_operations_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - client.list_instance_config_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instances_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancesResponse() - client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_instance_partitions_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancePartitionsResponse() - client.list_instance_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = spanner_instance_admin.Instance() - client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value = None - client.delete_instance(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - call.return_value = spanner_instance_admin.InstancePartition() - client.get_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - call.return_value = None - client.delete_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_partition_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_partition_operations_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - call.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() - client.list_instance_partition_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_move_instance_empty_call_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.move_instance(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.MoveInstanceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = InstanceAdminAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_configs_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - )) - await client.list_instance_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, - base_config='base_config_value', - etag='etag_value', - leader_options=['leader_options_value'], - reconciling=True, - state=spanner_instance_admin.InstanceConfig.State.CREATING, - free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, - quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, - storage_limit_per_processing_unit=3540, - )) - await client.get_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
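# --- Editorial sketch (illustrative; not part of the generated file) ---
# The async variants cannot stub a bare proto as call.return_value: the
# async client awaits the transport call, so the mocked value must be
# awaitable. grpc_helpers_async.FakeUnaryUnaryCall (already imported by
# this module) wraps a value in an awaitable that mimics a grpc.aio
# unary-unary call:
import asyncio
from google.api_core import grpc_helpers_async

async def _sketch_awaitable_stub():
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall("response-value")
    # Awaiting the fake call yields the wrapped response.
    assert await fake_call == "response-value"

# Usage: asyncio.run(_sketch_awaitable_stub())
# --- end sketch ---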
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_instance_config_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_config_operations_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstanceConfigOperationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_instance_config_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_instances_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_partitions_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_instance_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.Instance( - name='name_value', - config='config_value', - display_name='display_name_value', - node_count=1070, - processing_units=1743, - state=spanner_instance_admin.Instance.State.CREATING, - instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, - endpoint_uris=['endpoint_uris_value'], - edition=spanner_instance_admin.Instance.Edition.STANDARD, - default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, - )) - await client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.InstancePartition( - name='name_value', - config='config_value', - display_name='display_name_value', - state=spanner_instance_admin.InstancePartition.State.CREATING, - referencing_databases=['referencing_databases_value'], - referencing_backups=['referencing_backups_value'], - etag='etag_value', - )) - await client.get_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_instance_partition_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_instance_partition_operations_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(spanner_instance_admin.ListInstancePartitionOperationsResponse( - next_page_token='next_page_token_value', - unreachable_instance_partitions=['unreachable_instance_partitions_value'], - )) - await client.list_instance_partition_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_move_instance_empty_call_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.move_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.MoveInstanceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = InstanceAdminClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_instance_configs_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instance_configs(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstanceConfigsRequest, - dict, -]) -def test_list_instance_configs_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instance_configs(request) - - # Establish that the response is the type that we expect. 
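# --- Editorial sketch (assumes the generated pager classes; `pagers` and
# `spanner_instance_admin` are the names this test module already imports,
# and the keyword constructor shown here is an assumption about the
# generated signature) ---
# List RPCs come back wrapped in a pager that proxies unknown attributes to
# the underlying response, which is why next_page_token is readable on the
# pager itself:
from unittest import mock

def _sketch_pager_proxy():
    response = spanner_instance_admin.ListInstanceConfigsResponse(
        next_page_token='next_page_token_value',
    )
    pager = pagers.ListInstanceConfigsPager(
        method=mock.Mock(),
        request=spanner_instance_admin.ListInstanceConfigsRequest(),
        response=response,
    )
    assert pager.next_page_token == 'next_page_token_value'
# --- end sketch ---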
- assert isinstance(response, pagers.ListInstanceConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instance_configs_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_configs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_configs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.ListInstanceConfigsRequest.pb(spanner_instance_admin.ListInstanceConfigsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.ListInstanceConfigsResponse.to_json(spanner_instance_admin.ListInstanceConfigsResponse()) - req.return_value.content = return_value - - request = spanner_instance_admin.ListInstanceConfigsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.ListInstanceConfigsResponse() - post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigsResponse(), metadata - - client.list_instance_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_instance_config_rest_bad_request(request_type=spanner_instance_admin.GetInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
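# --- Editorial sketch (illustrative; not part of the generated file) ---
# The REST transport maps HTTP status codes onto api_core exception
# classes; that mapping is why the mocked 400 below surfaces to the caller
# as core_exceptions.BadRequest:
from google.api_core import exceptions as core_exceptions

def _sketch_http_error_mapping():
    exc = core_exceptions.from_http_status(400, 'bad request')
    assert isinstance(exc, core_exceptions.BadRequest)
# --- end sketch ---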
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstanceConfigRequest, - dict, -]) -def test_get_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstanceConfig( - name='name_value', - display_name='display_name_value', - config_type=spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED, - base_config='base_config_value', - etag='etag_value', - leader_options=['leader_options_value'], - reconciling=True, - state=spanner_instance_admin.InstanceConfig.State.CREATING, - free_instance_availability=spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE, - quorum_type=spanner_instance_admin.InstanceConfig.QuorumType.REGION, - storage_limit_per_processing_unit=3540, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstanceConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_instance_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_instance_admin.InstanceConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.config_type == spanner_instance_admin.InstanceConfig.Type.GOOGLE_MANAGED - assert response.base_config == 'base_config_value' - assert response.etag == 'etag_value' - assert response.leader_options == ['leader_options_value'] - assert response.reconciling is True - assert response.state == spanner_instance_admin.InstanceConfig.State.CREATING - assert response.free_instance_availability == spanner_instance_admin.InstanceConfig.FreeInstanceAvailability.AVAILABLE - assert response.quorum_type == spanner_instance_admin.InstanceConfig.QuorumType.REGION - assert response.storage_limit_per_processing_unit == 3540 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.GetInstanceConfigRequest.pb(spanner_instance_admin.GetInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.InstanceConfig.to_json(spanner_instance_admin.InstanceConfig()) - req.return_value.content = return_value - - request = spanner_instance_admin.GetInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.InstanceConfig() - post_with_metadata.return_value = spanner_instance_admin.InstanceConfig(), metadata - - client.get_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_instance_config_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.CreateInstanceConfigRequest, - dict, -]) -def test_create_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_instance_config(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.CreateInstanceConfigRequest.pb(spanner_instance_admin.CreateInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = spanner_instance_admin.CreateInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - 
client.create_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_instance_config_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.UpdateInstanceConfigRequest, - dict, -]) -def test_update_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'instance_config': {'name': 'projects/sample1/instanceConfigs/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_instance_config(request) - - # Establish that the response is the type that we expect. 
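# --- Editorial note (not part of the generated file) ---
# For long-running REST methods the generated success test stops at
# re-serializing the stubbed Operation rather than asserting on the
# returned future. A stricter, hypothetical check, assuming the client
# returns a google.api_core.operation.Operation:
#
#     assert response.operation.name == 'operations/spam'
# --- end note ---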
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.UpdateInstanceConfigRequest.pb(spanner_instance_admin.UpdateInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = spanner_instance_admin.UpdateInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_instance_config_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceConfigRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_instance_config(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.DeleteInstanceConfigRequest, - dict, -]) -def test_delete_instance_config_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instanceConfigs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
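# --- Editorial sketch (illustrative; not part of the generated file) ---
# Delete RPCs map to google.protobuf.Empty: the faked HTTP response below
# carries a zero-length body, and the client surfaces None. The equivalent
# parse of an explicitly empty JSON payload:
from google.protobuf import empty_pb2, json_format

def _sketch_empty_payload():
    msg = json_format.Parse('{}', empty_pb2.Empty())
    assert msg == empty_pb2.Empty()
# --- end sketch ---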
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_instance_config(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_config_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_config") as pre: - pre.assert_not_called() - pb_message = spanner_instance_admin.DeleteInstanceConfigRequest.pb(spanner_instance_admin.DeleteInstanceConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner_instance_admin.DeleteInstanceConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_instance_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_list_instance_config_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstanceConfigOperationsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instance_config_operations(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstanceConfigOperationsRequest, - dict, -]) -def test_list_instance_config_operations_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instance_config_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstanceConfigOperationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instance_config_operations_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_config_operations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_config_operations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.ListInstanceConfigOperationsRequest.pb(spanner_instance_admin.ListInstanceConfigOperationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse.to_json(spanner_instance_admin.ListInstanceConfigOperationsResponse()) - req.return_value.content = return_value - - request = spanner_instance_admin.ListInstanceConfigOperationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse() - post_with_metadata.return_value = spanner_instance_admin.ListInstanceConfigOperationsResponse(), metadata - - client.list_instance_config_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_instances_rest_bad_request(request_type=spanner_instance_admin.ListInstancesRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request 
call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instances(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancesRequest, - dict, -]) -def test_list_instances_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instances(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instances") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.ListInstancesRequest.pb(spanner_instance_admin.ListInstancesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.ListInstancesResponse.to_json(spanner_instance_admin.ListInstancesResponse()) - req.return_value.content = return_value - - request = spanner_instance_admin.ListInstancesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.ListInstancesResponse() - post_with_metadata.return_value = spanner_instance_admin.ListInstancesResponse(), metadata - - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_instance_partitions_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instance_partitions(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.ListInstancePartitionsRequest, - dict, -]) -def test_list_instance_partitions_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.ListInstancePartitionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.ListInstancePartitionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instance_partitions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancePartitionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instance_partitions_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partitions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partitions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.ListInstancePartitionsRequest.pb(spanner_instance_admin.ListInstancePartitionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.ListInstancePartitionsResponse.to_json(spanner_instance_admin.ListInstancePartitionsResponse()) - req.return_value.content = return_value - - request = spanner_instance_admin.ListInstancePartitionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.ListInstancePartitionsResponse() - post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionsResponse(), metadata - - client.list_instance_partitions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_instance_rest_bad_request(request_type=spanner_instance_admin.GetInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_instance(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstanceRequest, - dict, -]) -def test_get_instance_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.Instance( - name='name_value', - config='config_value', - display_name='display_name_value', - node_count=1070, - processing_units=1743, - state=spanner_instance_admin.Instance.State.CREATING, - instance_type=spanner_instance_admin.Instance.InstanceType.PROVISIONED, - endpoint_uris=['endpoint_uris_value'], - edition=spanner_instance_admin.Instance.Edition.STANDARD, - default_backup_schedule_type=spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, spanner_instance_admin.Instance) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.node_count == 1070 - assert response.processing_units == 1743 - assert response.state == spanner_instance_admin.Instance.State.CREATING - assert response.instance_type == spanner_instance_admin.Instance.InstanceType.PROVISIONED - assert response.endpoint_uris == ['endpoint_uris_value'] - assert response.edition == spanner_instance_admin.Instance.Edition.STANDARD - assert response.default_backup_schedule_type == spanner_instance_admin.Instance.DefaultBackupScheduleType.NONE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.GetInstanceRequest.pb(spanner_instance_admin.GetInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.Instance.to_json(spanner_instance_admin.Instance()) - req.return_value.content = return_value - - request = spanner_instance_admin.GetInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.Instance() - post_with_metadata.return_value = spanner_instance_admin.Instance(), metadata - - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_instance_rest_bad_request(request_type=spanner_instance_admin.CreateInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.create_instance(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_instance_admin.CreateInstanceRequest,
- dict,
-])
-def test_create_instance_rest_call_success(request_type):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.create_instance(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_instance_rest_interceptors(null_interceptor):
- transport = transports.InstanceAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
- )
- client = InstanceAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance") as post, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_instance_admin.CreateInstanceRequest.pb(spanner_instance_admin.CreateInstanceRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_instance_admin.CreateInstanceRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
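- # The interceptor contract exercised here: pre_create_instance rewrites the
- # (request, metadata) pair before transcoding, post_create_instance rewrites
- # the decoded response, and post_create_instance_with_metadata additionally
- # receives and returns the response metadata. Because create_instance is a
- # long-running operation, operation.Operation._set_result_from_operation is
- # patched above so the returned future does not try to resolve itself.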
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_instance_rest_bad_request(request_type=spanner_instance_admin.UpdateInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_instance(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.UpdateInstanceRequest, - dict, -]) -def test_update_instance_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/instances/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_instance_rest_interceptors(null_interceptor):
- transport = transports.InstanceAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
- )
- client = InstanceAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance") as post, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_instance_admin.UpdateInstanceRequest.pb(spanner_instance_admin.UpdateInstanceRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_instance_admin.UpdateInstanceRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_delete_instance_rest_bad_request(request_type=spanner_instance_admin.DeleteInstanceRequest):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.delete_instance(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_instance_admin.DeleteInstanceRequest,
- dict,
-])
-def test_delete_instance_rest_call_success(request_type):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_instance(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance") as pre: - pre.assert_not_called() - pb_message = spanner_instance_admin.DeleteInstanceRequest.pb(spanner_instance_admin.DeleteInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner_instance_admin.DeleteInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
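- # Unlike the spanner_instance_admin messages above, iam_policy_pb2 and
- # policy_pb2 are plain protobuf (not proto-plus) types, so no .pb()
- # conversion step is needed before serializing with json_format.MessageToJson
- # below. A minimal sketch of the round-trip being simulated, using the same
- # names as this file:
- #
- #   payload = json_format.MessageToJson(policy_pb2.Policy(version=774))
- #   restored = json_format.Parse(payload, policy_pb2.Policy())
- #   assert restored.version == 774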
- return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_set_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.SetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_iam_policy_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.GetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.GetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_test_iam_permissions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) - req.return_value.content = return_value - - request = iam_policy_pb2.TestIamPermissionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata - - client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_instance_partition_rest_bad_request(request_type=spanner_instance_admin.GetInstancePartitionRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_instance_partition(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.GetInstancePartitionRequest, - dict, -]) -def test_get_instance_partition_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = spanner_instance_admin.InstancePartition( - name='name_value', - config='config_value', - display_name='display_name_value', - state=spanner_instance_admin.InstancePartition.State.CREATING, - referencing_databases=['referencing_databases_value'], - referencing_backups=['referencing_backups_value'], - etag='etag_value', - node_count=1070, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = spanner_instance_admin.InstancePartition.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_instance_partition(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, spanner_instance_admin.InstancePartition) - assert response.name == 'name_value' - assert response.config == 'config_value' - assert response.display_name == 'display_name_value' - assert response.state == spanner_instance_admin.InstancePartition.State.CREATING - assert response.referencing_databases == ['referencing_databases_value'] - assert response.referencing_backups == ['referencing_backups_value'] - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_partition_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_get_instance_partition_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_get_instance_partition") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.GetInstancePartitionRequest.pb(spanner_instance_admin.GetInstancePartitionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.InstancePartition.to_json(spanner_instance_admin.InstancePartition()) - req.return_value.content = return_value - - request = spanner_instance_admin.GetInstancePartitionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.InstancePartition() - post_with_metadata.return_value = spanner_instance_admin.InstancePartition(), metadata - - client.get_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
post_with_metadata.assert_called_once() - - -def test_create_instance_partition_rest_bad_request(request_type=spanner_instance_admin.CreateInstancePartitionRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_instance_partition(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.CreateInstancePartitionRequest, - dict, -]) -def test_create_instance_partition_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_instance_partition(request) - - # Establish that the response is the type that we expect. 
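- # create_instance_partition is a long-running operation: the REST transport
- # parses the operations_pb2.Operation JSON above and wraps it in a
- # google.api_core.operation.Operation future, whose .operation property
- # exposes the raw proto checked next.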
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_instance_partition_rest_interceptors(null_interceptor):
- transport = transports.InstanceAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
- )
- client = InstanceAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition") as post, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_create_instance_partition_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_create_instance_partition") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_instance_admin.CreateInstancePartitionRequest.pb(spanner_instance_admin.CreateInstancePartitionRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_instance_admin.CreateInstancePartitionRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.create_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_delete_instance_partition_rest_bad_request(request_type=spanner_instance_admin.DeleteInstancePartitionRequest):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_instance_partition(request) - - -@pytest.mark.parametrize("request_type", [ - spanner_instance_admin.DeleteInstancePartitionRequest, - dict, -]) -def test_delete_instance_partition_rest_call_success(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_instance_partition(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_partition_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_delete_instance_partition") as pre: - pre.assert_not_called() - pb_message = spanner_instance_admin.DeleteInstancePartitionRequest.pb(spanner_instance_admin.DeleteInstancePartitionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = spanner_instance_admin.DeleteInstancePartitionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_update_instance_partition_rest_bad_request(request_type=spanner_instance_admin.UpdateInstancePartitionRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.update_instance_partition(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_instance_admin.UpdateInstancePartitionRequest,
- dict,
-])
-def test_update_instance_partition_rest_call_success(request_type):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'instance_partition': {'name': 'projects/sample1/instances/sample2/instancePartitions/sample3'}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.update_instance_partition(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_instance_partition_rest_interceptors(null_interceptor):
- transport = transports.InstanceAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
- )
- client = InstanceAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition") as post, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_update_instance_partition_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_update_instance_partition") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_instance_admin.UpdateInstancePartitionRequest.pb(spanner_instance_admin.UpdateInstancePartitionRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_instance_admin.UpdateInstancePartitionRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.update_instance_partition(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_list_instance_partition_operations_rest_bad_request(request_type=spanner_instance_admin.ListInstancePartitionOperationsRequest):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.list_instance_partition_operations(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_instance_admin.ListInstancePartitionOperationsRequest,
- dict,
-])
-def test_list_instance_partition_operations_rest_call_success(request_type):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(
- next_page_token='next_page_token_value',
- unreachable_instance_partitions=['unreachable_instance_partitions_value'],
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.list_instance_partition_operations(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListInstancePartitionOperationsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable_instance_partitions == ['unreachable_instance_partitions_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instance_partition_operations_rest_interceptors(null_interceptor): - transport = transports.InstanceAdminRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(), - ) - client = InstanceAdminClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations") as post, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "post_list_instance_partition_operations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_list_instance_partition_operations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = spanner_instance_admin.ListInstancePartitionOperationsRequest.pb(spanner_instance_admin.ListInstancePartitionOperationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse.to_json(spanner_instance_admin.ListInstancePartitionOperationsResponse()) - req.return_value.content = return_value - - request = spanner_instance_admin.ListInstancePartitionOperationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse() - post_with_metadata.return_value = spanner_instance_admin.ListInstancePartitionOperationsResponse(), metadata - - client.list_instance_partition_operations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_move_instance_rest_bad_request(request_type=spanner_instance_admin.MoveInstanceRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/instances/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.move_instance(request)
-
-
-@pytest.mark.parametrize("request_type", [
- spanner_instance_admin.MoveInstanceRequest,
- dict,
-])
-def test_move_instance_rest_call_success(request_type):
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/instances/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.move_instance(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_move_instance_rest_interceptors(null_interceptor):
- transport = transports.InstanceAdminRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.InstanceAdminRestInterceptor(),
- )
- client = InstanceAdminClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance") as post, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "post_move_instance_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.InstanceAdminRestInterceptor, "pre_move_instance") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = spanner_instance_admin.MoveInstanceRequest.pb(spanner_instance_admin.MoveInstanceRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = spanner_instance_admin.MoveInstanceRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.move_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/instances/sample2/databases/sample3/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/instances/sample2/databases/sample3/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_configs_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_configs), - '__call__') as call: - client.list_instance_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_config), - '__call__') as call: - client.get_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
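The empty-call failsafes here and below all verify one contract: when request=None and no flattened fields are passed, the generated client synthesizes an all-defaults request message before invoking the transport. A minimal standalone sketch of that contract, reusing the anonymous credentials and import paths this suite already uses (the sketch is illustrative, not part of the generated tests):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.spanner_admin_instance_v1 import InstanceAdminClient
from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin

client = InstanceAdminClient(
    credentials=ga_credentials.AnonymousCredentials(),
    transport="rest",
)
# Patch the transport-level callable so no HTTP request is ever sent.
with mock.patch.object(
        type(client.transport.list_instance_configs), '__call__') as call:
    client.list_instance_configs(request=None)

# The client should have built an empty (all-defaults) request message.
_, args, _ = call.mock_calls[0]
assert args[0] == spanner_instance_admin.ListInstanceConfigsRequest()
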
-def test_create_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_config), - '__call__') as call: - client.create_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_config), - '__call__') as call: - client.update_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_config_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_config), - '__call__') as call: - client.delete_instance_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_config_operations_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_config_operations), - '__call__') as call: - client.list_instance_config_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstanceConfigOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instances_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_instance_partitions_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partitions), - '__call__') as call: - client.list_instance_partitions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - client.create_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - client.delete_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
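The failsafes that follow move on to the IAM surface, which takes the shared google.iam.v1 request protos rather than Spanner-specific messages. A short sketch of building one of those requests by hand (the resource name and policy contents are placeholders):

from google.iam.v1 import iam_policy_pb2, policy_pb2

# The resource is an ordinary Spanner instance name; the Policy message is
# shared across Google Cloud APIs rather than generated per service.
request = iam_policy_pb2.SetIamPolicyRequest(
    resource="projects/sample1/instances/sample2",
    policy=policy_pb2.Policy(version=3),
)
assert request.policy.version == 3
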
-def test_set_iam_policy_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_partition), - '__call__') as call: - client.get_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.GetInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_instance_partition), - '__call__') as call: - client.create_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.CreateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
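One idiom worth spelling out before the remaining instance-partition failsafes below: every mock here patches __call__ on the type of the transport attribute rather than on the attribute itself, because Python resolves the implicit call protocol on the class. A self-contained sketch of why that interception works (the _Rpc class is a stand-in, not part of the library):

from unittest import mock

class _Rpc:
    """Stand-in for a transport-level callable that wraps a single RPC."""
    def __call__(self, request, **kwargs):
        raise RuntimeError("would hit the network")

rpc = _Rpc()
# Patching __call__ on the *type* intercepts rpc(...) itself, so a test can
# observe the outgoing request without any network I/O.
with mock.patch.object(type(rpc), "__call__") as call:
    rpc("fake-request")
call.assert_called_once_with("fake-request")
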
-def test_delete_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance_partition), - '__call__') as call: - client.delete_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.DeleteInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_partition_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance_partition), - '__call__') as call: - client.update_instance_partition(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.UpdateInstancePartitionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instance_partition_operations_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instance_partition_operations), - '__call__') as call: - client.list_instance_partition_operations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.ListInstancePartitionOperationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_move_instance_empty_call_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_instance), - '__call__') as call: - client.move_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = spanner_instance_admin.MoveInstanceRequest() - - assert args[0] == request_msg - - -def test_instance_admin_rest_lro_client(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.InstanceAdminGrpcTransport, - ) - -def test_instance_admin_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.InstanceAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_instance_admin_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.InstanceAdminTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_instance_configs', - 'get_instance_config', - 'create_instance_config', - 'update_instance_config', - 'delete_instance_config', - 'list_instance_config_operations', - 'list_instances', - 'list_instance_partitions', - 'get_instance', - 'create_instance', - 'update_instance', - 'delete_instance', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_instance_partition', - 'create_instance_partition', - 'delete_instance_partition', - 'update_instance_partition', - 'list_instance_partition_operations', - 'move_instance', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_instance_admin_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceAdminTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id="octopus", - ) - - -def test_instance_admin_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.InstanceAdminTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceAdminTransport() - adc.assert_called_once() - - -def test_instance_admin_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstanceAdminClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - ], -) -def test_instance_admin_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/spanner.admin',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.InstanceAdminGrpcTransport, - transports.InstanceAdminGrpcAsyncIOTransport, - transports.InstanceAdminRestTransport, - ], -) -def test_instance_admin_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.InstanceAdminGrpcTransport, grpc_helpers), - (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_instance_admin_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "spanner.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/spanner.admin', -), - scopes=["1", "2"], - default_host="spanner.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport]) -def test_instance_admin_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_instance_admin_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.InstanceAdminRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_instance_admin_host_no_port(transport_name): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'spanner.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_instance_admin_host_with_port(transport_name): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='spanner.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 
'spanner.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://spanner.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_instance_admin_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = InstanceAdminClient( - credentials=creds1, - transport=transport_name, - ) - client2 = InstanceAdminClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_instance_configs._session - session2 = client2.transport.list_instance_configs._session - assert session1 != session2 - session1 = client1.transport.get_instance_config._session - session2 = client2.transport.get_instance_config._session - assert session1 != session2 - session1 = client1.transport.create_instance_config._session - session2 = client2.transport.create_instance_config._session - assert session1 != session2 - session1 = client1.transport.update_instance_config._session - session2 = client2.transport.update_instance_config._session - assert session1 != session2 - session1 = client1.transport.delete_instance_config._session - session2 = client2.transport.delete_instance_config._session - assert session1 != session2 - session1 = client1.transport.list_instance_config_operations._session - session2 = client2.transport.list_instance_config_operations._session - assert session1 != session2 - session1 = client1.transport.list_instances._session - session2 = client2.transport.list_instances._session - assert session1 != session2 - session1 = client1.transport.list_instance_partitions._session - session2 = client2.transport.list_instance_partitions._session - assert session1 != session2 - session1 = client1.transport.get_instance._session - session2 = client2.transport.get_instance._session - assert session1 != session2 - session1 = client1.transport.create_instance._session - session2 = client2.transport.create_instance._session - assert session1 != session2 - session1 = client1.transport.update_instance._session - session2 = client2.transport.update_instance._session - assert session1 != session2 - session1 = client1.transport.delete_instance._session - session2 = client2.transport.delete_instance._session - assert session1 != session2 - session1 = client1.transport.set_iam_policy._session - session2 = client2.transport.set_iam_policy._session - assert session1 != session2 - session1 = client1.transport.get_iam_policy._session - session2 = client2.transport.get_iam_policy._session - assert session1 != session2 - session1 = client1.transport.test_iam_permissions._session - session2 = client2.transport.test_iam_permissions._session - assert session1 != session2 - session1 = client1.transport.get_instance_partition._session - session2 = client2.transport.get_instance_partition._session - assert session1 != session2 - session1 = client1.transport.create_instance_partition._session - session2 = client2.transport.create_instance_partition._session - assert session1 != session2 - session1 = client1.transport.delete_instance_partition._session - session2 = client2.transport.delete_instance_partition._session - assert session1 != session2 - session1 = client1.transport.update_instance_partition._session - session2 = client2.transport.update_instance_partition._session - assert session1 != session2 - session1 = client1.transport.list_instance_partition_operations._session - session2 = 
client2.transport.list_instance_partition_operations._session
- assert session1 != session2
- session1 = client1.transport.move_instance._session
- session2 = client2.transport.move_instance._session
- assert session1 != session2
-
-
-def test_instance_admin_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.InstanceAdminGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_instance_admin_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.InstanceAdminGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport])
-def test_instance_admin_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
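For reference, the client_cert_source_callback asserted against above is just a zero-argument callable returning a (certificate_chain, private_key) pair of bytes; the b"cert bytes" / b"key bytes" literals in the assertions are whatever the suite's fixture returns. A sketch of that callback contract (the function below is a placeholder, not the fixture itself):

def my_client_cert_source():
    # In real use, load these from local PEM files or a secure key store.
    return b"cert bytes", b"key bytes"

cert_chain, private_key = my_client_cert_source()
assert isinstance(cert_chain, bytes) and isinstance(private_key, bytes)
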
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.InstanceAdminGrpcTransport, transports.InstanceAdminGrpcAsyncIOTransport])
-def test_instance_admin_transport_channel_mtls_with_adc(
- transport_class
-):
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
- mock_cred = mock.Mock()
-
- with pytest.warns(DeprecationWarning):
- transport = transport_class(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=None,
- )
-
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_instance_admin_grpc_lro_client():
- client = InstanceAdminClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_instance_admin_grpc_lro_async_client():
- client = InstanceAdminAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsAsyncClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_instance_path():
- project = "squid"
- instance = "clam"
- expected = "projects/{project}/instances/{instance}".format(project=project, instance=instance, )
- actual = InstanceAdminClient.instance_path(project, instance)
- assert expected == actual
-
-
-def test_parse_instance_path():
- expected = {
- "project": "whelk",
- "instance": "octopus",
- }
- path = InstanceAdminClient.instance_path(**expected)
-
- # Check that the path construction is reversible.
- actual = InstanceAdminClient.parse_instance_path(path)
- assert expected == actual
-
-
-def test_instance_config_path():
- project = "oyster"
- instance_config = "nudibranch"
- expected = "projects/{project}/instanceConfigs/{instance_config}".format(project=project, instance_config=instance_config, )
- actual = InstanceAdminClient.instance_config_path(project, instance_config)
- assert expected == actual
-
-
-def test_parse_instance_config_path():
- expected = {
- "project": "cuttlefish",
- "instance_config": "mussel",
- }
- path = InstanceAdminClient.instance_config_path(**expected)
-
- # Check that the path construction is reversible.
- actual = InstanceAdminClient.parse_instance_config_path(path) - assert expected == actual - -def test_instance_partition_path(): - project = "winkle" - instance = "nautilus" - instance_partition = "scallop" - expected = "projects/{project}/instances/{instance}/instancePartitions/{instance_partition}".format(project=project, instance=instance, instance_partition=instance_partition, ) - actual = InstanceAdminClient.instance_partition_path(project, instance, instance_partition) - assert expected == actual - - -def test_parse_instance_partition_path(): - expected = { - "project": "abalone", - "instance": "squid", - "instance_partition": "clam", - } - path = InstanceAdminClient.instance_partition_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_instance_partition_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = InstanceAdminClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = InstanceAdminClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = InstanceAdminClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = InstanceAdminClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = InstanceAdminClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = InstanceAdminClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = InstanceAdminClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = InstanceAdminClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = InstanceAdminClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = InstanceAdminClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = InstanceAdminClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = InstanceAdminClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.InstanceAdminTransport, '_prep_wrapped_messages') as prep: - transport_class = InstanceAdminClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = InstanceAdminAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = InstanceAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (InstanceAdminClient, transports.InstanceAdminGrpcTransport), - (InstanceAdminAsyncClient, transports.InstanceAdminGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - )
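
The tests above repeat one assertion pattern many times: mock the transport stub, invoke the client, then check that the x-goog-request-params routing header was attached to the outgoing call. Below is a minimal, self-contained sketch of that pattern under stated assumptions: call_with_routing_header and test_routing_header_is_sent are illustrative placeholder names, not part of the generated Spanner surface, and the snippet only demonstrates the assertion shape, not the real transport.

    from unittest import mock

    def call_with_routing_header(stub, name):
        # Placeholder standing in for a generated client method: it forwards
        # the request and attaches the routing header derived from the
        # request's resource name, mirroring what the real transport does
        # before the call goes over the wire.
        stub(
            request={"name": name},
            metadata=[("x-goog-request-params", f"name={name}")],
        )

    def test_routing_header_is_sent():
        stub = mock.Mock()
        call_with_routing_header(stub, "locations")
        # Same shape as the generated assertions: unpack the recorded
        # (args, kwargs) pair off the mock and check that the header tuple
        # is present in the metadata list.
        _, kw = stub.call_args
        assert ("x-goog-request-params", "name=locations") in kw["metadata"]

gRPC metadata is a sequence of (key, value) tuples rather than a dict, which is why the generated tests assert tuple membership in kw["metadata"] instead of doing a dictionary lookup.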